Mirror of https://github.com/docker/docs.git

Merge pull request #1927 from mnowster/validate-extended-services

Validate and interpolate extended services

Commit 0060437f57
compose/config/config.py

@@ -10,7 +10,10 @@ from .errors import CircularReference
 from .errors import ComposeFileNotFound
 from .errors import ConfigurationError
 from .interpolation import interpolate_environment_variables
-from .validation import validate_against_schema
+from .validation import validate_against_fields_schema
+from .validation import validate_against_service_schema
+from .validation import validate_extended_service_exists
+from .validation import validate_extends_file_path
 from .validation import validate_service_names
 from .validation import validate_top_level_object
 from compose.cli.utils import find_candidates_in_parent_dirs

@@ -137,13 +140,17 @@ def load(config_details):
     config, working_dir, filename = config_details

     processed_config = pre_process_config(config)
-    validate_against_schema(processed_config)
+    validate_against_fields_schema(processed_config)

     service_dicts = []

     for service_name, service_dict in list(processed_config.items()):
-        loader = ServiceLoader(working_dir=working_dir, filename=filename)
-        service_dict = loader.make_service_dict(service_name, service_dict)
+        loader = ServiceLoader(
+            working_dir=working_dir,
+            filename=filename,
+            service_name=service_name,
+            service_dict=service_dict)
+        service_dict = loader.make_service_dict()
         validate_paths(service_dict)
         service_dicts.append(service_dict)
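Note: the unit tests later in this diff drive the entry point above roughly as in the sketch below. This is a sketch only; it assumes the compose package from this repo is importable, and that ConfigDetails is the (config, working_dir, filename) tuple that load() unpacks on its first line.

    from compose.config import config

    # Build service dicts straight from an in-memory config, the way the new
    # extra_hosts validation tests in this PR do.
    service_dicts = config.load(
        config.ConfigDetails(
            {'web': {'image': 'busybox', 'command': 'top'}},
            'working_dir',
            'filename.yml',
        )
    )
    print([s['name'] for s in service_dicts])   # ['web']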
@@ -151,84 +158,117 @@ def load(config_details):


 class ServiceLoader(object):
-    def __init__(self, working_dir, filename=None, already_seen=None):
+    def __init__(self, working_dir, filename, service_name, service_dict, already_seen=None):
+        if working_dir is None:
+            raise Exception("No working_dir passed to ServiceLoader()")

         self.working_dir = os.path.abspath(working_dir)

         if filename:
             self.filename = os.path.abspath(filename)
         else:
             self.filename = filename
         self.already_seen = already_seen or []
+        self.service_dict = service_dict.copy()
+        self.service_name = service_name
+        self.service_dict['name'] = service_name

     def detect_cycle(self, name):
         if self.signature(name) in self.already_seen:
             raise CircularReference(self.already_seen + [self.signature(name)])

-    def make_service_dict(self, name, service_dict):
-        service_dict = service_dict.copy()
-        service_dict['name'] = name
-        service_dict = resolve_environment(service_dict, working_dir=self.working_dir)
-        service_dict = self.resolve_extends(service_dict)
-        return process_container_options(service_dict, working_dir=self.working_dir)
+    def make_service_dict(self):
+        self.resolve_environment()
+        if 'extends' in self.service_dict:
+            self.validate_and_construct_extends()
+            self.service_dict = self.resolve_extends()

-    def resolve_extends(self, service_dict):
-        if 'extends' not in service_dict:
-            return service_dict
+        if not self.already_seen:
+            validate_against_service_schema(self.service_dict, self.service_name)

-        extends_options = self.validate_extends_options(service_dict['name'], service_dict['extends'])
+        return process_container_options(self.service_dict, working_dir=self.working_dir)

-        if self.working_dir is None:
-            raise Exception("No working_dir passed to ServiceLoader()")
+    def resolve_environment(self):
+        """
+        Unpack any environment variables from an env_file, if set.
+        Interpolate environment values if set.
+        """
+        if 'environment' not in self.service_dict and 'env_file' not in self.service_dict:
+            return

-        if 'file' in extends_options:
-            extends_from_filename = extends_options['file']
-            other_config_path = expand_path(self.working_dir, extends_from_filename)
-        else:
-            other_config_path = self.filename
+        env = {}

+        if 'env_file' in self.service_dict:
+            for f in get_env_files(self.service_dict, working_dir=self.working_dir):
+                env.update(env_vars_from_file(f))
+            del self.service_dict['env_file']

-        other_working_dir = os.path.dirname(other_config_path)
-        other_already_seen = self.already_seen + [self.signature(service_dict['name'])]
+        env.update(parse_environment(self.service_dict.get('environment')))
+        env = dict(resolve_env_var(k, v) for k, v in six.iteritems(env))
+
+        self.service_dict['environment'] = env
+
+    def validate_and_construct_extends(self):
+        validate_extends_file_path(
+            self.service_name,
+            self.service_dict['extends'],
+            self.filename
+        )
+        self.extended_config_path = self.get_extended_config_path(
+            self.service_dict['extends']
+        )
+        self.extended_service_name = self.service_dict['extends']['service']
+
+        full_extended_config = pre_process_config(
+            load_yaml(self.extended_config_path)
+        )
+
+        validate_extended_service_exists(
+            self.extended_service_name,
+            full_extended_config,
+            self.extended_config_path
+        )
+        validate_against_fields_schema(full_extended_config)
+
+        self.extended_config = full_extended_config[self.extended_service_name]
+
+    def resolve_extends(self):
+        other_working_dir = os.path.dirname(self.extended_config_path)
+        other_already_seen = self.already_seen + [self.signature(self.service_name)]

         other_loader = ServiceLoader(
             working_dir=other_working_dir,
-            filename=other_config_path,
+            filename=self.extended_config_path,
+            service_name=self.service_name,
+            service_dict=self.extended_config,
             already_seen=other_already_seen,
         )

-        base_service = extends_options['service']
-        other_config = load_yaml(other_config_path)
-
-        if base_service not in other_config:
-            msg = (
-                "Cannot extend service '%s' in %s: Service not found"
-            ) % (base_service, other_config_path)
-            raise ConfigurationError(msg)
-
-        other_service_dict = other_config[base_service]
-        other_loader.detect_cycle(extends_options['service'])
-        other_service_dict = other_loader.make_service_dict(
-            service_dict['name'],
-            other_service_dict,
-        )
+        other_loader.detect_cycle(self.extended_service_name)
+        other_service_dict = other_loader.make_service_dict()
+
         validate_extended_service_dict(
             other_service_dict,
-            filename=other_config_path,
-            service=extends_options['service'],
+            filename=self.extended_config_path,
+            service=self.extended_service_name,
         )

-        return merge_service_dicts(other_service_dict, service_dict)
+        return merge_service_dicts(other_service_dict, self.service_dict)
+
+    def get_extended_config_path(self, extends_options):
+        """
+        Service we are extending either has a value for 'file' set, which we
+        need to obtain a full path too or we are extending from a service
+        defined in our own file.
+        """
+        if 'file' in extends_options:
+            extends_from_filename = extends_options['file']
+            return expand_path(self.working_dir, extends_from_filename)
+
+        return self.filename

     def signature(self, name):
         return (self.filename, name)

-    def validate_extends_options(self, service_name, extends_options):
-        error_prefix = "Invalid 'extends' configuration for %s:" % service_name
-
-        if 'file' not in extends_options and self.filename is None:
-            raise ConfigurationError(
-                "%s you need to specify a 'file', e.g. 'file: something.yml'" % error_prefix
-            )
-
-        return extends_options
-

 def validate_extended_service_dict(service_dict, filename, service):
     error_prefix = "Cannot extend service '%s' in %s:" % (service, filename)
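The extends flow above is easiest to see in miniature. A toy sketch follows; it is not Compose's real code (the real merge_service_dicts merges per key, and the loader also validates schemas and resolves environment along the way), but it shows the same recursion, override order, and cycle detection:

    # Resolve 'extends' recursively: load the base service first, then let the
    # extending service's own keys win. Cycle detection uses the same
    # (filename, service_name) signature idea as ServiceLoader.signature().
    def resolve(configs, filename, name, seen=()):
        sig = (filename, name)
        if sig in seen:
            raise ValueError("circular extends: %r" % (list(seen) + [sig],))
        service = dict(configs[filename][name])
        extends = service.pop('extends', None)
        if extends is None:
            return service
        other_file = extends.get('file', filename)
        base = resolve(configs, other_file, extends['service'], seen + (sig,))
        merged = dict(base)
        merged.update(service)          # the extending service overrides the base
        return merged

    configs = {
        'common.yml': {'web': {'image': 'busybox', 'command': 'top'}},
        'docker-compose.yml': {
            'myweb': {
                'extends': {'file': 'common.yml', 'service': 'web'},
                'command': '/bin/true',
            },
        },
    }
    print(resolve(configs, 'docker-compose.yml', 'myweb'))
    # {'image': 'busybox', 'command': '/bin/true'}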
@@ -320,9 +360,6 @@ def get_env_files(options, working_dir=None):
     if 'env_file' not in options:
         return {}

-    if working_dir is None:
-        raise Exception("No working_dir passed to get_env_files()")
-
     env_files = options.get('env_file', [])
     if not isinstance(env_files, list):
         env_files = [env_files]
@@ -330,26 +367,6 @@ def get_env_files(options, working_dir=None):
     return [expand_path(working_dir, path) for path in env_files]


-def resolve_environment(service_dict, working_dir=None):
-    service_dict = service_dict.copy()
-
-    if 'environment' not in service_dict and 'env_file' not in service_dict:
-        return service_dict
-
-    env = {}
-
-    if 'env_file' in service_dict:
-        for f in get_env_files(service_dict, working_dir=working_dir):
-            env.update(env_vars_from_file(f))
-        del service_dict['env_file']
-
-    env.update(parse_environment(service_dict.get('environment')))
-    env = dict(resolve_env_var(k, v) for k, v in six.iteritems(env))
-
-    service_dict['environment'] = env
-    return service_dict
-
-
 def parse_environment(environment):
     if not environment:
         return {}
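One detail worth calling out from resolve_environment(), now a ServiceLoader method replacing the module-level helper removed here: values gathered from env_file entries are applied first and the 'environment' key is applied last, so it wins. A minimal illustration (resolve_env_var, not shown in this hunk, additionally fills in values left unset from the shell environment):

    env = {}
    # later env_file entries override earlier ones...
    for file_vars in [{'FOO': 'from-one.env', 'BAR': 'x'}, {'FOO': 'from-two.env'}]:
        env.update(file_vars)
    # ...and the service's 'environment' mapping overrides them all
    env.update({'FOO': 'from-environment-key'})
    print(env)  # {'FOO': 'from-environment-key', 'BAR': 'x'}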
compose/config/fields_schema.json

@@ -19,7 +19,12 @@
     "cap_drop": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
     "command": {"$ref": "#/definitions/string_or_list"},
     "container_name": {"type": "string"},
-    "cpu_shares": {"type": "string"},
+    "cpu_shares": {
+        "oneOf": [
+            {"type": "number"},
+            {"type": "string"}
+        ]
+    },
     "cpuset": {"type": "string"},
     "detach": {"type": "boolean"},
     "devices": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},

@@ -27,7 +32,7 @@
     "dns_search": {"$ref": "#/definitions/string_or_list"},
     "dockerfile": {"type": "string"},
     "domainname": {"type": "string"},
-    "entrypoint": {"type": "string"},
+    "entrypoint": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
     "env_file": {"$ref": "#/definitions/string_or_list"},

     "environment": {

@@ -75,7 +80,7 @@
     },
     "name": {"type": "string"},
     "net": {"type": "string"},
-    "pid": {"type": "string"},
+    "pid": {"type": ["string", "null"]},

     "ports": {
         "type": "array",

@@ -94,36 +99,19 @@
         "uniqueItems": true
     },

-    "privileged": {"type": "string"},
+    "privileged": {"type": "boolean"},
     "read_only": {"type": "boolean"},
     "restart": {"type": "string"},
-    "security_opt": {"type": "string"},
+    "security_opt": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
     "stdin_open": {"type": "string"},
     "tty": {"type": "string"},
     "user": {"type": "string"},
     "volumes": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
+    "volume_driver": {"type": "string"},
     "volumes_from": {"type": "array", "items": {"type": "string"}, "uniqueItems": true},
     "working_dir": {"type": "string"}
 },

-"anyOf": [
-    {
-        "required": ["build"],
-        "not": {"required": ["image"]}
-    },
-    {
-        "required": ["image"],
-        "not": {"anyOf": [
-            {"required": ["build"]},
-            {"required": ["dockerfile"]}
-        ]}
-    },
-    {
-        "required": ["extends"],
-        "not": {"required": ["build", "image"]}
-    }
-],
-
 "dependencies": {
     "memswap_limit": ["mem_limit"]
 },
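A quick way to see what the cpu_shares change above buys (a sketch; it assumes only the jsonschema package that validation.py already depends on):

    from jsonschema import Draft4Validator

    cpu_shares_schema = {"oneOf": [{"type": "number"}, {"type": "string"}]}
    validator = Draft4Validator(cpu_shares_schema)

    print(validator.is_valid(73))     # True  -- a YAML integer now passes
    print(validator.is_valid("73"))   # True  -- strings still pass
    print(validator.is_valid([73]))   # False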
compose/config/service_schema.json (new file)

@@ -0,0 +1,39 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+
+  "type": "object",
+
+  "properties": {
+    "name": {"type": "string"}
+  },
+
+  "required": ["name"],
+
+  "allOf": [
+    {"$ref": "fields_schema.json#/definitions/service"},
+    {"$ref": "#/definitions/service_constraints"}
+  ],
+
+  "definitions": {
+    "service_constraints": {
+      "anyOf": [
+        {
+          "required": ["build"],
+          "not": {"required": ["image"]}
+        },
+        {
+          "required": ["image"],
+          "not": {"anyOf": [
+            {"required": ["build"]},
+            {"required": ["dockerfile"]}
+          ]}
+        },
+        {
+          "required": ["extends"],
+          "not": {"required": ["build", "image"]}
+        }
+      ]
+    }
+  }
+
+}
compose/config/validation.py

@@ -5,6 +5,7 @@ from functools import wraps
 from docker.utils.ports import split_port
 from jsonschema import Draft4Validator
 from jsonschema import FormatChecker
+from jsonschema import RefResolver
 from jsonschema import ValidationError

 from .errors import ConfigurationError

@@ -66,6 +67,27 @@ def validate_top_level_object(func):
     return func_wrapper


+def validate_extends_file_path(service_name, extends_options, filename):
+    """
+    The service to be extended must either be defined in the config key 'file',
+    or within 'filename'.
+    """
+    error_prefix = "Invalid 'extends' configuration for %s:" % service_name
+
+    if 'file' not in extends_options and filename is None:
+        raise ConfigurationError(
+            "%s you need to specify a 'file', e.g. 'file: something.yml'" % error_prefix
+        )
+
+
+def validate_extended_service_exists(extended_service_name, full_extended_config, extended_config_path):
+    if extended_service_name not in full_extended_config:
+        msg = (
+            "Cannot extend service '%s' in %s: Service not found"
+        ) % (extended_service_name, extended_config_path)
+        raise ConfigurationError(msg)
+
+
 def get_unsupported_config_msg(service_name, error_key):
     msg = "Unsupported config option for '{}' service: '{}'".format(service_name, error_key)
     if error_key in DOCKER_CONFIG_HINTS:

@@ -73,7 +95,7 @@ def get_unsupported_config_msg(service_name, error_key):
     return msg


-def process_errors(errors):
+def process_errors(errors, service_name=None):
     """
     jsonschema gives us an error tree full of information to explain what has
     gone wrong. Process each error and pull out relevant information and re-write

@@ -103,7 +125,7 @@ def process_errors(errors):

     for error in errors:
         # handle root level errors
-        if len(error.path) == 0:
+        if len(error.path) == 0 and not error.instance.get('name'):
             if error.validator == 'type':
                 msg = "Top level object needs to be a dictionary. Check your .yml file that you have defined a service at the top level."
                 root_msgs.append(msg)

@@ -115,11 +137,14 @@ def process_errors(errors):
             root_msgs.append(_clean_error_message(error.message))

         else:
-            # handle service level errors
-            service_name = error.path[0]
-            # pop the service name off our path
-            error.path.popleft()
+            if not service_name:
+                # field_schema errors will have service name on the path
+                service_name = error.path[0]
+                error.path.popleft()
+            else:
+                # service_schema errors have the service name passed in, as that
+                # is not available on error.path or necessarily error.instance
+                service_name = service_name

             if error.validator == 'additionalProperties':
                 invalid_config_key = _parse_key_from_error_msg(error)

@@ -189,16 +214,27 @@ def process_errors(errors):
     return "\n".join(root_msgs + invalid_keys + required + type_errors + other_errors)


-def validate_against_schema(config):
+def validate_against_fields_schema(config):
+    schema_filename = "fields_schema.json"
+    return _validate_against_schema(config, schema_filename)
+
+
+def validate_against_service_schema(config, service_name):
+    schema_filename = "service_schema.json"
+    return _validate_against_schema(config, schema_filename, service_name)
+
+
+def _validate_against_schema(config, schema_filename, service_name=None):
     config_source_dir = os.path.dirname(os.path.abspath(__file__))
-    schema_file = os.path.join(config_source_dir, "schema.json")
+    schema_file = os.path.join(config_source_dir, schema_filename)

     with open(schema_file, "r") as schema_fh:
         schema = json.load(schema_fh)

-    validation_output = Draft4Validator(schema, format_checker=FormatChecker(["ports"]))
+    resolver = RefResolver('file://' + config_source_dir + '/', schema)
+    validation_output = Draft4Validator(schema, resolver=resolver, format_checker=FormatChecker(["ports"]))

     errors = [error for error in sorted(validation_output.iter_errors(config), key=str)]
     if errors:
-        error_msg = process_errors(errors)
+        error_msg = process_errors(errors, service_name)
         raise ConfigurationError("Validation failed, reason(s):\n{}".format(error_msg))
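The new service_schema.json leans on a cross-file reference ("fields_schema.json#/definitions/service"), which is why a RefResolver is wired in above. A sketch of the same wiring in isolation (it assumes both schema files sit next to the script; the paths and the sample service are illustrative only):

    import json
    import os

    from jsonschema import Draft4Validator, RefResolver

    schema_dir = os.path.dirname(os.path.abspath(__file__))
    with open(os.path.join(schema_dir, "service_schema.json")) as fh:
        schema = json.load(fh)

    # The file:// base URI lets the relative $ref to fields_schema.json resolve.
    resolver = RefResolver('file://' + schema_dir + '/', schema)
    Draft4Validator(schema, resolver=resolver).validate({"name": "web", "image": "busybox"})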
New test fixtures (paths as referenced by the tests below, under tests/fixtures/extends/):

tests/fixtures/extends/invalid-links.yml
@@ -0,0 +1,9 @@
+myweb:
+  build: '.'
+  extends:
+    service: web
+  command: top
+web:
+  build: '.'
+  links:
+    - "mydb:db"

tests/fixtures/extends/invalid-net.yml
@@ -0,0 +1,8 @@
+myweb:
+  build: '.'
+  extends:
+    service: web
+  command: top
+web:
+  build: '.'
+  net: "container:db"

tests/fixtures/extends/invalid-volumes.yml
@@ -0,0 +1,9 @@
+myweb:
+  build: '.'
+  extends:
+    service: web
+  command: top
+web:
+  build: '.'
+  volumes_from:
+    - "db"

tests/fixtures/extends/service-with-invalid-schema.yml
@@ -0,0 +1,4 @@
+myweb:
+  extends:
+    file: valid-composite-extends.yml
+    service: web

tests/fixtures/extends/service-with-valid-composite-extends.yml
@@ -0,0 +1,5 @@
+myweb:
+  build: '.'
+  extends:
+    file: 'valid-composite-extends.yml'
+    service: web

tests/fixtures/extends/valid-common-config.yml
@@ -0,0 +1,6 @@
+myweb:
+  build: '.'
+  extends:
+    file: valid-common.yml
+    service: common-config
+  command: top

tests/fixtures/extends/valid-common.yml
@@ -0,0 +1,3 @@
+common-config:
+  environment:
+    - FOO=1

tests/fixtures/extends/valid-composite-extends.yml
@@ -0,0 +1,2 @@
+web:
+  command: top

tests/fixtures/extends/valid-interpolation-2.yml
@@ -0,0 +1,3 @@
+web:
+  build: '.'
+  hostname: "host-${HOSTNAME_VALUE}"

tests/fixtures/extends/valid-interpolation.yml
@@ -0,0 +1,5 @@
+myweb:
+  extends:
+    service: web
+    file: valid-interpolation-2.yml
+  command: top
Integration tests (ServiceTest):

@@ -165,16 +165,6 @@ class ServiceTest(DockerClientTestCase):
         service.start_container(container)
         self.assertEqual(set(container.get('HostConfig.ExtraHosts')), set(extra_hosts))

-    def test_create_container_with_extra_hosts_string(self):
-        extra_hosts = 'somehost:162.242.195.82'
-        service = self.create_service('db', extra_hosts=extra_hosts)
-        self.assertRaises(ConfigError, lambda: service.create_container())
-
-    def test_create_container_with_extra_hosts_list_of_dicts(self):
-        extra_hosts = [{'somehost': '162.242.195.82'}, {'otherhost': '50.31.209.229'}]
-        service = self.create_service('db', extra_hosts=extra_hosts)
-        self.assertRaises(ConfigError, lambda: service.create_container())
-
     def test_create_container_with_extra_hosts_dicts(self):
         extra_hosts = {'somehost': '162.242.195.82', 'otherhost': '50.31.209.229'}
         extra_hosts_list = ['somehost:162.242.195.82', 'otherhost:50.31.209.229']

@@ -515,7 +505,7 @@ class ServiceTest(DockerClientTestCase):
         self.assertEqual(container['HostConfig']['Privileged'], True)

     def test_expose_does_not_publish_ports(self):
-        service = self.create_service('web', expose=[8000])
+        service = self.create_service('web', expose=["8000"])
         container = create_and_start_container(service).inspect()
         self.assertEqual(container['NetworkSettings']['Ports'], {'8000/tcp': None})
Integration test helpers (DockerClientTestCase.create_service):

@@ -31,11 +31,29 @@ class DockerClientTestCase(unittest.TestCase):
         if 'command' not in kwargs:
             kwargs['command'] = ["top"]

-        options = ServiceLoader(working_dir='.').make_service_dict(name, kwargs)
+        links = kwargs.get('links', None)
+        volumes_from = kwargs.get('volumes_from', None)
+        net = kwargs.get('net', None)
+
+        workaround_options = ['links', 'volumes_from', 'net']
+        for key in workaround_options:
+            try:
+                del kwargs[key]
+            except KeyError:
+                pass
+
+        options = ServiceLoader(working_dir='.', filename=None, service_name=name, service_dict=kwargs).make_service_dict()
+
         labels = options.setdefault('labels', {})
         labels['com.docker.compose.test-name'] = self.id()

+        if links:
+            options['links'] = links
+        if volumes_from:
+            options['volumes_from'] = volumes_from
+        if net:
+            options['net'] = net
+
         return Service(
             project='composetest',
             client=self.client,
Unit tests for compose.config:

@@ -11,11 +11,15 @@ from compose.config import config
 from compose.config.errors import ConfigurationError


-def make_service_dict(name, service_dict, working_dir):
+def make_service_dict(name, service_dict, working_dir, filename=None):
     """
     Test helper function to construct a ServiceLoader
     """
-    return config.ServiceLoader(working_dir=working_dir).make_service_dict(name, service_dict)
+    return config.ServiceLoader(
+        working_dir=working_dir,
+        filename=filename,
+        service_name=name,
+        service_dict=service_dict).make_service_dict()


 def service_sort(services):
@@ -202,6 +206,39 @@ class ConfigTest(unittest.TestCase):
             )
         )

+    def test_config_extra_hosts_string_raises_validation_error(self):
+        expected_error_msg = "Service 'web' configuration key 'extra_hosts' contains an invalid type"
+
+        with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
+            config.load(
+                config.ConfigDetails(
+                    {'web': {
+                        'image': 'busybox',
+                        'extra_hosts': 'somehost:162.242.195.82'
+                    }},
+                    'working_dir',
+                    'filename.yml'
+                )
+            )
+
+    def test_config_extra_hosts_list_of_dicts_validation_error(self):
+        expected_error_msg = "Service 'web' configuration key 'extra_hosts' contains an invalid type"
+
+        with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
+            config.load(
+                config.ConfigDetails(
+                    {'web': {
+                        'image': 'busybox',
+                        'extra_hosts': [
+                            {'somehost': '162.242.195.82'},
+                            {'otherhost': '50.31.209.229'}
+                        ]
+                    }},
+                    'working_dir',
+                    'filename.yml'
+                )
+            )
+

 class InterpolationTest(unittest.TestCase):
     @mock.patch.dict(os.environ)
@@ -236,7 +273,7 @@ class InterpolationTest(unittest.TestCase):
                 'web': {
                     'image': '${FOO}',
                     'command': '${BAR}',
-                    'entrypoint': '${BAR}',
+                    'container_name': '${BAR}',
                 },
             },
             working_dir='.',
@@ -282,12 +319,13 @@ class InterpolationTest(unittest.TestCase):
     @mock.patch.dict(os.environ)
     def test_volume_binding_with_home(self):
         os.environ['HOME'] = '/home/user'
-        d = make_service_dict('foo', {'volumes': ['~:/container/path']}, working_dir='.')
+        d = make_service_dict('foo', {'build': '.', 'volumes': ['~:/container/path']}, working_dir='.')
         self.assertEqual(d['volumes'], ['/home/user:/container/path'])

     @mock.patch.dict(os.environ)
     def test_volume_binding_with_local_dir_name_raises_warning(self):
         def make_dict(**config):
+            config['build'] = '.'
             make_service_dict('foo', config, working_dir='.')

         with mock.patch('compose.config.config.log.warn') as warn:
@@ -332,6 +370,7 @@ class InterpolationTest(unittest.TestCase):

     def test_named_volume_with_driver_does_not_expand(self):
         d = make_service_dict('foo', {
+            'build': '.',
             'volumes': ['namedvolume:/data'],
             'volume_driver': 'foodriver',
         }, working_dir='.')

@@ -341,6 +380,7 @@ class InterpolationTest(unittest.TestCase):
     def test_home_directory_with_driver_does_not_expand(self):
         os.environ['NAME'] = 'surprise!'
         d = make_service_dict('foo', {
+            'build': '.',
             'volumes': ['~:/data'],
             'volume_driver': 'foodriver',
         }, working_dir='.')
@@ -500,36 +540,36 @@ class MergeLabelsTest(unittest.TestCase):

     def test_no_override(self):
         service_dict = config.merge_service_dicts(
-            make_service_dict('foo', {'labels': ['foo=1', 'bar']}, 'tests/'),
-            make_service_dict('foo', {}, 'tests/'),
+            make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar']}, 'tests/'),
+            make_service_dict('foo', {'build': '.'}, 'tests/'),
         )
         self.assertEqual(service_dict['labels'], {'foo': '1', 'bar': ''})

     def test_no_base(self):
         service_dict = config.merge_service_dicts(
-            make_service_dict('foo', {}, 'tests/'),
-            make_service_dict('foo', {'labels': ['foo=2']}, 'tests/'),
+            make_service_dict('foo', {'build': '.'}, 'tests/'),
+            make_service_dict('foo', {'build': '.', 'labels': ['foo=2']}, 'tests/'),
         )
         self.assertEqual(service_dict['labels'], {'foo': '2'})

     def test_override_explicit_value(self):
         service_dict = config.merge_service_dicts(
-            make_service_dict('foo', {'labels': ['foo=1', 'bar']}, 'tests/'),
-            make_service_dict('foo', {'labels': ['foo=2']}, 'tests/'),
+            make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar']}, 'tests/'),
+            make_service_dict('foo', {'build': '.', 'labels': ['foo=2']}, 'tests/'),
         )
         self.assertEqual(service_dict['labels'], {'foo': '2', 'bar': ''})

     def test_add_explicit_value(self):
         service_dict = config.merge_service_dicts(
-            make_service_dict('foo', {'labels': ['foo=1', 'bar']}, 'tests/'),
-            make_service_dict('foo', {'labels': ['bar=2']}, 'tests/'),
+            make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar']}, 'tests/'),
+            make_service_dict('foo', {'build': '.', 'labels': ['bar=2']}, 'tests/'),
         )
         self.assertEqual(service_dict['labels'], {'foo': '1', 'bar': '2'})

     def test_remove_explicit_value(self):
         service_dict = config.merge_service_dicts(
-            make_service_dict('foo', {'labels': ['foo=1', 'bar=2']}, 'tests/'),
-            make_service_dict('foo', {'labels': ['bar']}, 'tests/'),
+            make_service_dict('foo', {'build': '.', 'labels': ['foo=1', 'bar=2']}, 'tests/'),
+            make_service_dict('foo', {'build': '.', 'labels': ['bar']}, 'tests/'),
         )
         self.assertEqual(service_dict['labels'], {'foo': '1', 'bar': ''})
@@ -611,6 +651,7 @@ class EnvTest(unittest.TestCase):

         service_dict = make_service_dict(
             'foo', {
+                'build': '.',
                 'environment': {
                     'FILE_DEF': 'F1',
                     'FILE_DEF_EMPTY': '',

@@ -629,7 +670,7 @@ class EnvTest(unittest.TestCase):
     def test_env_from_file(self):
         service_dict = make_service_dict(
             'foo',
-            {'env_file': 'one.env'},
+            {'build': '.', 'env_file': 'one.env'},
             'tests/fixtures/env',
         )
         self.assertEqual(

@@ -640,7 +681,7 @@ class EnvTest(unittest.TestCase):
     def test_env_from_multiple_files(self):
         service_dict = make_service_dict(
             'foo',
-            {'env_file': ['one.env', 'two.env']},
+            {'build': '.', 'env_file': ['one.env', 'two.env']},
             'tests/fixtures/env',
         )
         self.assertEqual(

@@ -662,7 +703,7 @@ class EnvTest(unittest.TestCase):
         os.environ['ENV_DEF'] = 'E3'
         service_dict = make_service_dict(
             'foo',
-            {'env_file': 'resolve.env'},
+            {'build': '.', 'env_file': 'resolve.env'},
             'tests/fixtures/env',
         )
         self.assertEqual(
@@ -862,6 +903,17 @@ class ExtendsTest(unittest.TestCase):

         self.assertEquals(len(service), 1)
         self.assertIsInstance(service[0], dict)
+        self.assertEquals(service[0]['command'], "/bin/true")
+
+    def test_extended_service_with_invalid_config(self):
+        expected_error_msg = "Service 'myweb' has neither an image nor a build path specified"
+
+        with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
+            load_from_filename('tests/fixtures/extends/service-with-invalid-schema.yml')
+
+    def test_extended_service_with_valid_config(self):
+        service = load_from_filename('tests/fixtures/extends/service-with-valid-composite-extends.yml')
+        self.assertEquals(service[0]['command'], "top")

     def test_extends_file_defaults_to_self(self):
         """
@@ -887,37 +939,33 @@ class ExtendsTest(unittest.TestCase):
             }
         ]))

-    def test_blacklisted_options(self):
-        def load_config():
-            return make_service_dict('myweb', {
-                'extends': {
-                    'file': 'whatever',
-                    'service': 'web',
-                }
-            }, '.')
-
-        with self.assertRaisesRegexp(ConfigurationError, 'links'):
-            other_config = {'web': {'links': ['db']}}
-
-            with mock.patch.object(config, 'load_yaml', return_value=other_config):
-                print(load_config())
-
-        with self.assertRaisesRegexp(ConfigurationError, 'volumes_from'):
-            other_config = {'web': {'volumes_from': ['db']}}
-
-            with mock.patch.object(config, 'load_yaml', return_value=other_config):
-                print(load_config())
-
-        with self.assertRaisesRegexp(ConfigurationError, 'net'):
-            other_config = {'web': {'net': 'container:db'}}
-
-            with mock.patch.object(config, 'load_yaml', return_value=other_config):
-                print(load_config())
-
-            other_config = {'web': {'net': 'host'}}
-
-            with mock.patch.object(config, 'load_yaml', return_value=other_config):
-                print(load_config())
+    def test_invalid_links_in_extended_service(self):
+        expected_error_msg = "services with 'links' cannot be extended"
+        with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
+            load_from_filename('tests/fixtures/extends/invalid-links.yml')
+
+    def test_invalid_volumes_from_in_extended_service(self):
+        expected_error_msg = "services with 'volumes_from' cannot be extended"
+
+        with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
+            load_from_filename('tests/fixtures/extends/invalid-volumes.yml')
+
+    def test_invalid_net_in_extended_service(self):
+        expected_error_msg = "services with 'net: container' cannot be extended"
+
+        with self.assertRaisesRegexp(ConfigurationError, expected_error_msg):
+            load_from_filename('tests/fixtures/extends/invalid-net.yml')
+
+    @mock.patch.dict(os.environ)
+    def test_valid_interpolation_in_extended_service(self):
+        os.environ.update(
+            HOSTNAME_VALUE="penguin",
+        )
+        expected_interpolated_value = "host-penguin"
+
+        service_dicts = load_from_filename('tests/fixtures/extends/valid-interpolation.yml')
+        for service in service_dicts:
+            self.assertTrue(service['hostname'], expected_interpolated_value)

     def test_volume_path(self):
         dicts = load_from_filename('tests/fixtures/volume-path/docker-compose.yml')
@@ -949,6 +997,10 @@ class ExtendsTest(unittest.TestCase):
         with self.assertRaisesRegexp(ConfigurationError, err_msg):
             load_from_filename('tests/fixtures/extends/nonexistent-service.yml')

+    def test_partial_service_config_in_extends_is_still_valid(self):
+        dicts = load_from_filename('tests/fixtures/extends/valid-common-config.yml')
+        self.assertEqual(dicts[0]['environment'], {'FOO': '1'})
+

 class BuildPathTest(unittest.TestCase):
     def setUp(self):