Python re.compile() Examples

The following are 30 code examples of re.compile(), drawn from open-source projects. The source project, author, file, and license are listed above each example. You may also want to check out all available functions and classes of the re module, or try the search function.
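
If you have not used re.compile() before, here is a minimal, self-contained sketch (not taken from any project below) of the compile-once, reuse-many-times pattern that all of these examples follow. The compiled object exposes the same match(), search(), findall(), and sub() methods as the module-level functions.

import re

# Compile the pattern once, then reuse the compiled object.
word_re = re.compile(r'\b[a-z]+\b', re.IGNORECASE)

print(word_re.findall('Hello World 123'))       # ['Hello', 'World']
print(word_re.match('Hello World').group(0))    # 'Hello'
print(word_re.search('123 abc') is not None)    # True
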
Example #1
Source Project: drydock   Author: airshipit   File: base.py    License: Apache License 2.0
def interpolate_url(self):
        pattern = r'\{([a-z_]+)\}'
        regex = re.compile(pattern)
        start = 0
        new_url = self.resource_url

        while (start + 1) < len(self.resource_url):
            match = regex.search(self.resource_url, start)
            if match is None:
                return new_url

            param = match.group(1)
            val = getattr(self, param, None)
            if val is None:
                raise ValueError("Missing variable value")
            new_url = new_url.replace('{' + param + '}', str(val))
            start = match.end(1) + 1

        return new_url 
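
As a quick, hedged illustration of the \{([a-z_]+)\} pattern used above, run against an invented resource URL (the path and substituted value below are hypothetical, not part of drydock):

import re

regex = re.compile(r'\{([a-z_]+)\}')
url = '/api/v1.0/nodes/{node_name}/builddata'    # hypothetical resource_url
m = regex.search(url)
print(m.group(1))                                # 'node_name'
print(url.replace('{' + m.group(1) + '}', 'node01'))
# '/api/v1.0/nodes/node01/builddata'
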
Example #2
Source Project: aegea   Author: kislyuk   File: __init__.py    License: Apache License 2.0
def validate_hostname(hostname):
    if len(hostname) > 255:
        raise Exception("Hostname {} is longer than 255 characters".format(hostname))
    if hostname[-1] == ".":
        hostname = hostname[:-1]
    allowed = re.compile(r"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
    if not all(allowed.match(x) for x in hostname.split(".")):
        raise Exception("Hostname {} is not RFC 1123 compliant".format(hostname)) 
Example #3
Source Project: drydock   Author: airshipit   File: test_validation_rule_storage_partitioning.py    License: Apache License 2.0
def test_invalid_storage_partitioning(self, deckhand_ingester,
                                          drydock_state, input_files,
                                          mock_get_build_data):
        input_file = input_files.join("invalid_validation.yaml")

        design_ref = "file://%s" % str(input_file)

        orch = Orchestrator(
            state_manager=drydock_state, ingester=deckhand_ingester)

        status, site_design = Orchestrator.get_effective_site(orch, design_ref)

        validator = StoragePartitioning()
        message_list = validator.execute(site_design)

        regex = re.compile('Volume group .+ not assigned any physical volumes')

        for msg in message_list:
            msg = msg.to_dict()
            LOG.debug(msg)
            assert len(msg.get('documents')) > 0
            assert msg.get('error')
            assert regex.search(msg.get('message')) is not None

        assert len(message_list) == 2 
Example #4
Source Project: drydock   Author: airshipit   File: test_validation_rule_storage_mountpoint.py    License: Apache License 2.0
def test_invalid_partition_mountpoints(self, deckhand_ingester,
                                           drydock_state, input_files,
                                           mock_get_build_data):

        input_file = input_files.join("invalid_validation.yaml")
        design_ref = "file://%s" % str(input_file)

        orch = Orchestrator(
            state_manager=drydock_state, ingester=deckhand_ingester)

        status, site_design = Orchestrator.get_effective_site(orch, design_ref)

        validator = StorageMountpoints()
        message_list = validator.execute(site_design, orchestrator=orch)

        regex = re.compile('Mountpoint .+ already exists')

        for msg in message_list:
            msg = msg.to_dict()
            LOG.debug(msg)
            assert regex.search(msg.get('message')) is not None
            assert msg.get('error') is True 
Example #5
Source Project: drydock   Author: airshipit   File: test_validation_rule_no_duplicate_IPs.py    License: Apache License 2.0
def test_invalid_no_duplicate_IPs(self, input_files, drydock_state,
                                      deckhand_ingester):
        input_file = input_files.join("invalid_validation.yaml")
        design_ref = "file://%s" % str(input_file)

        orch = Orchestrator(
            state_manager=drydock_state, ingester=deckhand_ingester)

        status, site_design = Orchestrator.get_effective_site(orch, design_ref)

        validator = NoDuplicateIpsCheck()
        message_list = validator.execute(site_design)

        regex = re.compile('Duplicate IP Address Found: [0-9.]+')

        for msg in message_list:
            msg = msg.to_dict()
            LOG.debug(msg)
            assert len(msg.get('documents')) > 0
            assert msg.get('error') is True
            assert regex.search(msg.get('message')) is not None 
Example #6
Source Project: drydock   Author: airshipit   File: test_validation_rule_storage_mountpoint.py    License: Apache License 2.0
def test_invalid_vg_mountpoints(self, deckhand_ingester, drydock_state,
                                    input_files, mock_get_build_data):

        input_file = input_files.join("invalid_mountpoint.yaml")
        design_ref = "file://%s" % str(input_file)

        orch = Orchestrator(
            state_manager=drydock_state, ingester=deckhand_ingester)

        status, site_design = Orchestrator.get_effective_site(orch, design_ref)

        validator = StorageMountpoints()
        message_list = validator.execute(site_design, orchestrator=orch)

        regex = re.compile('Mountpoint .+ already exists')

        for msg in message_list:
            msg = msg.to_dict()
            LOG.debug(msg)
            assert regex.search(msg.get('message')) is not None
            assert msg.get('error') is True 
Example #7
Source Project: drydock   Author: airshipit   File: test_validation_rule_boot_storage.py    License: Apache License 2.0
def test_invalid_boot_storage_small(self, deckhand_ingester, drydock_state,
                                        input_files, mock_get_build_data):
        input_file = input_files.join("invalid_boot_storage_small.yaml")
        design_ref = "file://%s" % str(input_file)

        orch = Orchestrator(
            state_manager=drydock_state, ingester=deckhand_ingester)

        status, site_design = Orchestrator.get_effective_site(orch, design_ref)

        validator = BootStorageRational()
        message_list = validator.execute(site_design, orchestrator=orch)

        regex = re.compile('.+ volume must be > .+GB')

        for msg in message_list:
            msg = msg.to_dict()
            LOG.debug(msg)
            assert len(msg.get('documents')) > 0
            assert regex.search(msg.get('message')) is not None
            assert msg.get('error')

        assert len(message_list) == 4 
Example #8
Source Project: drydock   Author: airshipit   File: test_validation_rule_network_cidr.py    License: Apache License 2.0
def test_invalid_network_cidr(self, mocker, deckhand_ingester,
                                  drydock_state, input_files):

        input_file = input_files.join("invalid_network_cidr.yaml")
        design_ref = "file://%s" % str(input_file)

        orch = Orchestrator(
            state_manager=drydock_state, ingester=deckhand_ingester)

        status, site_design = Orchestrator.get_effective_site(orch, design_ref)

        validator = CidrValidity()
        message_list = validator.execute(site_design, orchestrator=orch)
        msg = message_list[0].to_dict()

        regex_diagnostic = re.compile('Provide a CIDR acceptable by MAAS: .+')
        regex_message = re.compile('The provided CIDR .+ has host bits set')

        assert len(message_list) >= 1
        assert msg.get('error') is True
        assert any([
            regex_diagnostic.search(msg.get('diagnostic')),
            regex_message.search(msg.get('message'))
                  ]) 
Example #9
Source Project: cWMI   Author: fireeye   File: i_to_m.py    License: Apache License 2.0
def convert_param(method, param):
    # remove notation, split by upper, convert to lowercase
    param_sanitized = param.replace('*', '')
    substr = param_sanitized
    try:
        substr = re.search('([A-Z]\w+)', param_sanitized).group(1)
    except:
        pass
    case_re = re.compile(r'((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))')
    converted_param = case_re.sub(r'_\1', substr).lower()
    if converted_param in keyword.kwlist or converted_param in dir(__builtins__):
        converted_param += '_param'
    # check for duplicates. if seen, append number to end
    if 'params' in method and len([param for param in method['params'] if param['name'] == converted_param]):
        param_names = [param['name'] for param in method['params']]
        for x in range(2, 10):
            count_name = '{:s}{:d}'.format(converted_param, x)
            if count_name not in param_names:
                converted_param = count_name
                break
    return converted_param 
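
For reference, the case_re pattern above inserts an underscore before each CamelCase boundary; a standalone illustration with invented parameter names:

import re

case_re = re.compile(r'((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))')
for name in ('TargetInstancePath', 'CreateSnapshotV2', 'HTTPHeaderName'):
    print(case_re.sub(r'_\1', name).lower())
# target_instance_path
# create_snapshot_v2
# http_header_name
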
Example #10
Source Project: neural-fingerprinting   Author: StephanZheng   File: docscrape.py    License: BSD 3-Clause "New" or "Revised" License
def _parse_summary(self):
        """Grab signature (if given) and summary"""
        summary = self._doc.read_to_next_empty_line()
        summary_str = "\n".join([s.strip() for s in summary])
        if re.compile('^([\\w. ]+=)?[\\w\\.]+\\(.*\\)$').match(summary_str):
            self['Signature'] = summary_str
            if not self._is_at_section():
                self['Summary'] = self._doc.read_to_next_empty_line()
        elif re.compile('^[\\w]+\n[-]+').match(summary_str):
            self['Summary'] = ''
            self._doc.reset()
        else:
            self['Summary'] = summary

        if not self._is_at_section():
            self['Extended Summary'] = self._read_to_next_section() 
Example #11
Source Project: Starx_Pixiv_Collector   Author: SuzukiHonoka   File: start.py    License: MIT License
def format_pixiv_illust_original_url(id_url, mode=1):
    tag = 'Format_Pixiv_Illust_Original_Url'
    if mode == 1:
        contents = get_text_from_url(id_url)
        try:
            img_src_re = re.compile(r'\"urls\":{.*?}')
            img_src = img_src_re.findall(contents)
            final_dict = json.loads("{" + img_src[0] + "}")
            return final_dict['urls']['original']
        except Exception as e:
            print_with_tag(tag, "An error occurred when parsing the json file.")
            print_with_tag(tag, e)
    elif mode == 2:
        data_list = []
        json_datas = get_text_from_url(id_url)
        json_datas_format = json.loads(json_datas)['body']
        for urls in json_datas_format:
            data_list.append(urls['urls']['original'])
        return data_list 
Example #12
Source Project: multibootusb   Author: mbusb   File: update_cfg_file.py    License: GNU General Public License v2.0
def tweak(self, content):
        tweaked = self.legacy_tweak(content)
        if tweaked:
            return tweaked
        apply_persistence_to_all_lines = \
            0 < self.setup_params.persistence_size and \
            not self.config_is_persistence_aware(content)
        matching_re = r'^(\s*(%s)\s*)(.*)$' % self.BOOT_PARAMS_STARTER
        kernel_parameter_line_pattern = re.compile(
            matching_re,
            flags = re.I | re.MULTILINE)
        out = self.tweak_first_match(
            content,
            kernel_parameter_line_pattern,
            apply_persistence_to_all_lines,
            self.param_operations(),
            self.param_operations_for_persistence())
        
        return self.post_process(out) 
Example #13
Source Project: multibootusb   Author: mbusb   File: discover.py    License: GNU General Public License v2.0
def _match_major_minor(cls, value):
        """
        Match the number under the assumption that it is a major,minor pair.

        :param str value: value to match
        :returns: the device number or None
        :rtype: int or NoneType
        """
        major_minor_re = re.compile(
           r'^(?P<major>\d+)(\D+)(?P<minor>\d+)$'
        )
        match = major_minor_re.match(value)
        return match and os.makedev(
           int(match.group('major')),
           int(match.group('minor'))
        ) 
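
A quick, hedged check of the major/minor pattern above (the device strings are invented; os.makedev is Unix-only):

import os
import re

major_minor_re = re.compile(r'^(?P<major>\d+)(\D+)(?P<minor>\d+)$')

for value in ('8:1', '259-0', '8:'):
    match = major_minor_re.match(value)
    print(value, match and os.makedev(int(match.group('major')),
                                      int(match.group('minor'))))
# '8:1' and '259-0' print platform-specific device numbers; '8:' prints None
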
Example #14
Source Project: video2commons   Author: toolforge   File: urlextract.py    License: GNU General Public License v3.0
def escape_wikitext(wikitext):
    """Escape wikitext for use in file description."""
    rep = OrderedDict([
        ('{|', '{{(}}&#124;'),
        ('|}', '&#124;{{)}}'),
        ('||', '&#124;&#124;'),
        ('|', '&#124;'),
        ('[[', '{{!((}}'),
        (']]', '{{))!}}'),
        ('{{', '{{((}}'),
        ('}}', '{{))}}'),
        ('{', '{{(}}'),
        ('}', '{{)}}'),
    ])
    rep = dict((re.escape(k), v) for k, v in rep.iteritems())
    pattern = re.compile("|".join(rep.keys()))
    return pattern.sub(lambda m: rep[re.escape(m.group(0))], wikitext)


# Source: mediawiki.Title.js@9df363d 
Example #15
Source Project: dynamic-training-with-apache-mxnet-on-aws   Author: awslabs   File: monitor.py    License: Apache License 2.0
def __init__(self, interval, stat_func=None, pattern='.*', sort=False):
        if stat_func is None:
            def asum_stat(x):
                """returns |x|/size(x), async execution."""
                return ndarray.norm(x)/sqrt(x.size)
            stat_func = asum_stat
        self.stat_func = stat_func
        self.interval = interval
        self.activated = False
        self.queue = []
        self.step = 0
        self.exes = []
        self.re_prog = re.compile(pattern)
        self.sort = sort
        def stat_helper(name, array):
            """wrapper for executor callback"""
            array = ctypes.cast(array, NDArrayHandle)
            array = NDArray(array, writable=False)
            if not self.activated or not self.re_prog.match(py_str(name)):
                return
            self.queue.append((self.step, py_str(name), self.stat_func(array)))
        self.stat_helper = stat_helper 
Example #16
Source Project: dynamic-training-with-apache-mxnet-on-aws   Author: awslabs   File: rnn_layer.py    License: Apache License 2.0
def _collect_params_with_prefix(self, prefix=''):
        if prefix:
            prefix += '.'
        pattern = re.compile(r'(l|r)(\d)_(i2h|h2h)_(weight|bias)\Z')
        def convert_key(m, bidirectional): # for compatibility with old parameter format
            d, l, g, t = [m.group(i) for i in range(1, 5)]
            if bidirectional:
                return '_unfused.{}.{}_cell.{}_{}'.format(l, d, g, t)
            else:
                return '_unfused.{}.{}_{}'.format(l, g, t)
        bidirectional = any(pattern.match(k).group(1) == 'r' for k in self._reg_params)

        ret = {prefix + convert_key(pattern.match(key), bidirectional) : val
               for key, val in self._reg_params.items()}
        for name, child in self._children.items():
            ret.update(child._collect_params_with_prefix(prefix + name))
        return ret 
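
The parameter-name pattern above splits a fused-RNN parameter key into direction, layer, gate group, and parameter type; a standalone illustration with a representative key:

import re

pattern = re.compile(r'(l|r)(\d)_(i2h|h2h)_(weight|bias)\Z')
m = pattern.match('l0_i2h_weight')
print(m.groups())   # ('l', '0', 'i2h', 'weight')
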
Example #17
Source Project: DOTA_models   Author: ringringyi   File: conll2tree.py    License: Apache License 2.0
def main(unused_argv):
  logging.set_verbosity(logging.INFO)
  with tf.Session() as sess:
    src = gen_parser_ops.document_source(batch_size=32,
                                         corpus_name=FLAGS.corpus_name,
                                         task_context=FLAGS.task_context)
    sentence = sentence_pb2.Sentence()
    while True:
      documents, finished = sess.run(src)
      logging.info('Read %d documents', len(documents))
      for d in documents:
        sentence.ParseFromString(d)
        tr = asciitree.LeftAligned()
        d = to_dict(sentence)
        print 'Input: %s' % sentence.text
        print 'Parse:'
        tr_str = tr(d)
        pat = re.compile(r'\s*@\d+$')
        for tr_ln in tr_str.splitlines():
          print pat.sub('', tr_ln)

      if finished:
        break 
Example #18
Source Project: DOTA_models   Author: ringringyi   File: vgslspecs.py    License: Apache License 2.0
def AddMaxPool(self, prev_layer, index):
    """Add a maxpool layer.

    Args:
      prev_layer: Input tensor.
      index:      Position in model_str to start parsing

    Returns:
      Output tensor, end index in model_str.
    """
    pattern = re.compile(R'(Mp)({\w+})?(\d+),(\d+)(?:,(\d+),(\d+))?')
    m = pattern.match(self.model_str, index)
    if m is None:
      return None, None
    name = self._GetLayerName(m.group(0), index, m.group(2))
    height = int(m.group(3))
    width = int(m.group(4))
    y_stride = height if m.group(5) is None else m.group(5)
    x_stride = width if m.group(6) is None else m.group(6)
    self.reduction_factors[1] *= y_stride
    self.reduction_factors[2] *= x_stride
    return slim.max_pool2d(
        prev_layer, [height, width], [y_stride, x_stride],
        padding='SAME',
        scope=name), m.end() 
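
The Mp spec pattern above can be exercised on its own; a short sketch with illustrative spec strings (the second shows the optional {name} and stride groups):

import re

pattern = re.compile(r'(Mp)({\w+})?(\d+),(\d+)(?:,(\d+),(\d+))?')
print(pattern.match('Mp2,2').groups())
# ('Mp', None, '2', '2', None, None)
print(pattern.match('Mp{pool0}3,3,2,2').groups())
# ('Mp', '{pool0}', '3', '3', '2', '2')
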
Example #19
Source Project: DOTA_models   Author: ringringyi   File: vgslspecs.py    License: Apache License 2.0
def AddDropout(self, prev_layer, index):
    """Adds a dropout layer.

    Args:
      prev_layer: Input tensor.
      index:      Position in model_str to start parsing

    Returns:
      Output tensor, end index in model_str.
    """
    pattern = re.compile(R'(Do)({\w+})?')
    m = pattern.match(self.model_str, index)
    if m is None:
      return None, None
    name = self._GetLayerName(m.group(0), index, m.group(2))
    layer = slim.dropout(
        prev_layer, 0.5, is_training=self.is_training, scope=name)
    return layer, m.end() 
Example #20
Source Project: BERT-Classification-Tutorial   Author: Socialbird-AILab   File: modeling_test.py    License: Apache License 2.0
def assert_all_tensors_reachable(self, sess, outputs):
        """Checks that all the tensors in the graph are reachable from outputs."""
        graph = sess.graph

        ignore_strings = [
            "^.*/dilation_rate$",
            "^.*/Tensordot/concat$",
            "^.*/Tensordot/concat/axis$",
            "^testing/.*$",
        ]

        ignore_regexes = [re.compile(x) for x in ignore_strings]

        unreachable = self.get_unreachable_ops(graph, outputs)
        filtered_unreachable = []
        for x in unreachable:
            do_ignore = False
            for r in ignore_regexes:
                m = r.match(x.name)
                if m is not None:
                    do_ignore = True
            if do_ignore:
                continue
            filtered_unreachable.append(x)
        unreachable = filtered_unreachable

        self.assertEqual(
            len(unreachable), 0, "The following ops are unreachable: %s" %
                                 (" ".join([x.name for x in unreachable]))) 
Example #21
Source Project: svviz   Author: svviz   File: track.py    License: MIT License
def _drawCigar(self, alignment, yoffset, height, isFlanking):
        eachNuc = False # this gets to be computationally infeasible to display in the browser
        pattern = re.compile('([0-9]*)([MIDNSHP=X])')

        genomePosition = alignment.start
        sequencePosition = 0

        chromPartSeq = self.chromPartsCollection.getSeq(alignment.regionID)

        extras = {}
        if isFlanking:
            extras = {"class":"flanking"}
        for length, code in pattern.findall(alignment.cigar):
            length = int(length)
            if code == "M":
                for i in range(length):
                    curstart = self.scale.topixels(genomePosition+i, alignment.regionID)
                    curend = self.scale.topixels(genomePosition+i+1, alignment.regionID)

                    color = self.nucColors[alignment.seq[sequencePosition+i]]

                    alt = alignment.seq[sequencePosition+i]
                    ref = chromPartSeq[genomePosition+i]
                    
                    if eachNuc or alt!=ref:
                        self.svg.rect(curstart, yoffset, curend-curstart, height, fill=color, **extras)

                sequencePosition += length
                genomePosition += length
            elif code in "D":
                curstart = self.scale.topixels(genomePosition, alignment.regionID)
                curend = self.scale.topixels(genomePosition+length+1, alignment.regionID)
                self.svg.rect(curstart, yoffset, curend-curstart, height, fill=self.deletionColor, **extras)

                genomePosition += length
            elif code in "IHS":
                curstart = self.scale.topixels(genomePosition-0.5, alignment.regionID)
                curend = self.scale.topixels(genomePosition+0.5, alignment.regionID)
                self.svg.rect(curstart, yoffset, curend-curstart, height, fill=self.insertionColor, **extras)

                sequencePosition += length 
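
To see what the CIGAR pattern above yields, a standalone sketch with an invented CIGAR string; findall() returns one (length, code) tuple per operation:

import re

pattern = re.compile('([0-9]*)([MIDNSHP=X])')
print(pattern.findall('36M2D14M5S'))
# [('36', 'M'), ('2', 'D'), ('14', 'M'), ('5', 'S')]
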
Example #22
Source Project: incubator-spot   Author: apache   File: file_watcher.py    License: Apache License 2.0
def __init__(self, path, supported_files, recursive):
        self._logger  = logging.getLogger('SPOT.INGEST.COMMON.FILE_WATCHER')
        self._queue   = []

        super(FileWatcher, self).__init__()

        self._logger.info('Schedule watching "{0}" directory.'.format(path))
        super(FileWatcher, self).schedule(NewFileEventHandler(self), path, recursive)

        self._regexs  = [re.compile(x) for x in supported_files]
        pattern_names = ', '.join(['"%s"' % x for x in supported_files])
        self._logger.info('Supported filenames: {0}'.format(pattern_names))

        self._logger.info('The search in sub-directories is {0}.'
            .format('enabled' if recursive else 'disabled')) 
Example #23
Source Project: jumpserver-python-sdk   Author: jumpserver   File: models.py    License: GNU General Public License v2.0
def _pattern(self):
        if self.__pattern:
            return self.__pattern
        if self.type['value'] == 'command':
            regex = []
            content = self.content.replace('\r\n', '\n')
            for cmd in content.split('\n'):
                cmd = cmd.replace(' ', '\s+')
                regex.append(r'\b{0}\b'.format(cmd))
            self.__pattern = re.compile(r'{}'.format('|'.join(regex)))
        else:
            self.__pattern = re.compile(r'{0}'.format(self.content))
        return self.__pattern 
Example #24
Source Project: cyberdisc-bot   Author: CyberDiscovery   File: cyber.py    License: MIT License
def __init__(self, bot: Bot):
        self.bot = bot

        self.matches = [
            (re.compile(i[0], re.IGNORECASE), i[1]) for i in self.match_strings
        ] 
Example #25
Source Project: drydock   Author: airshipit   File: util.py    License: Apache License 2.0
def calculate_bytes(size_str):
        """
        Calculate the size in bytes of a size_str.

        #m or #M or #mb or #MB = # * 1000 * 1000
        #g or #G or #gb or #GB = # * 1000 * 1000 * 1000
        #t or #T or #tb or #TB = # * 1000 * 1000 * 1000 * 1000

        :param size_str: A string representing the desired size
        :return size: The calculated size in bytes
        """
        pattern = r'(\d+)([mMbBgGtT]{1,2})'
        regex = re.compile(pattern)
        match = regex.match(size_str)

        if not match:
            raise errors.InvalidSizeFormat(
                "Invalid size string format: %s" % size_str)

        base_size = int(match.group(1))

        if match.group(2) in ['m', 'M', 'mb', 'MB']:
            computed_size = base_size * (1000 * 1000)
        elif match.group(2) in ['g', 'G', 'gb', 'GB']:
            computed_size = base_size * (1000 * 1000 * 1000)
        elif match.group(2) in ['t', 'T', 'tb', 'TB']:
            computed_size = base_size * (1000 * 1000 * 1000 * 1000)

        return computed_size 
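
A hedged illustration of the size-string regex above in isolation (the drydock errors module and the exception path are omitted here):

import re

regex = re.compile(r'(\d+)([mMbBgGtT]{1,2})')
for size_str in ('512mb', '10GB', '2t'):
    match = regex.match(size_str)
    print(size_str, match.group(1), match.group(2))
# 512mb 512 mb
# 10GB 10 GB
# 2t 2 t
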
Example #26
Source Project: drydock   Author: airshipit   File: bootaction_validity.py    License: Apache License 2.0
def __init__(self):
        super().__init__('Bootaction pkg_list Validation', 'DD4002')
        version_fields = r'(\d+:)?([a-zA-Z0-9.+~-]+)(-[a-zA-Z0-9.+~]+)'
        self.version_fields = re.compile(version_fields) 
Example #27
Source Project: drydock   Author: airshipit   File: hostname_validity.py    License: Apache License 2.0
def run_validation(self, site_design, orchestrator=None):
        # Check FQDN length is <= 255 characters per RFC 1035

        node_list = site_design.baremetal_nodes or []
        invalid_nodes = [
            n for n in node_list if len(n.get_fqdn(site_design)) > 255
        ]

        for n in invalid_nodes:
            msg = "FQDN %s is invalid, greater than 255 characters." % n.get_fqdn(
                site_design)
            self.report_error(
                msg, [n.doc_ref],
                "RFC 1035 requires full DNS names to be < 256 characters.")

        # Check each label in the domain name is <= 63 characters per RFC 1035
        # and only contains A-Z,a-z,0-9,-

        valid_label = re.compile('[a-z0-9-]{1,63}', flags=re.I)

        for n in node_list:
            domain_labels = n.get_fqdn(site_design).split('.')
            for domain_label in domain_labels:
                if not valid_label.fullmatch(domain_label):
                    msg = "FQDN %s is invalid - label '%s' is invalid." % (
                        n.get_fqdn(site_design), domain_label)
                    self.report_error(
                        msg, [n.doc_ref],
                        "RFC 1035 requires each label in a DNS name to be <= 63 characters and contain "
                        "only A-Z, a-z, 0-9, and hyphens.") 
Example #28
Source Project: drydock   Author: airshipit   File: middleware.py    License: Apache License 2.0
def __init__(self):
        # Setup validation pattern for external marker
        UUIDv4_pattern = '^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$'
        self.marker_re = re.compile(UUIDv4_pattern, re.I) 
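
A small check of the external-marker pattern above with invented marker values; the re.I flag lets lowercase hex digits through as well:

import re

UUIDv4_pattern = '^[0-9A-F]{8}-[0-9A-F]{4}-4[0-9A-F]{3}-[89AB][0-9A-F]{3}-[0-9A-F]{12}$'
marker_re = re.compile(UUIDv4_pattern, re.I)

print(bool(marker_re.match('9b2c3f6e-1a2b-4c3d-8e4f-0123456789ab')))   # True
print(bool(marker_re.match('not-a-uuid')))                             # False
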
Example #29
Source Project: drydock   Author: airshipit   File: test_validation_rule_storage_sizing.py    License: Apache License 2.0
def test_invalid_storage_sizing(self, deckhand_ingester, drydock_state,
                                    input_files, mock_get_build_data):

        input_file = input_files.join("invalid_validation.yaml")
        design_ref = "file://%s" % str(input_file)

        orch = Orchestrator(
            state_manager=drydock_state, ingester=deckhand_ingester)

        status, site_design = Orchestrator.get_effective_site(orch, design_ref)

        validator = StorageSizing()
        message_list = validator.execute(site_design, orchestrator=orch)

        regex = re.compile(
            '(Storage partition)|(Logical Volume) .+ size is < 0')
        regex_1 = re.compile('greater than 99%')

        assert len(message_list) == 8
        for msg in message_list:
            msg = msg.to_dict()
            LOG.debug(msg)
            assert regex.search(
                msg.get('message')) is not None or regex_1.search(
                    msg.get('message')) is not None
            assert msg.get('error') is True 
Example #30
Source Project: drydock   Author: airshipit   File: test_validation_rule_unique_network.py    License: Apache License 2.0
def test_invalid_unique_network(self, mocker, deckhand_ingester,
                                    drydock_state, input_files):

        input_file = input_files.join("invalid_unique_network.yaml")
        design_ref = "file://%s" % str(input_file)

        orch = Orchestrator(
            state_manager=drydock_state, ingester=deckhand_ingester)

        status, site_design = Orchestrator.get_effective_site(orch, design_ref)

        validator = UniqueNetworkCheck()
        message_list = validator.execute(site_design, orchestrator=orch)

        regex = re.compile(
            r'Allowed network .+ duplicated on NetworkLink .+ and NetworkLink .+'
        )
        regex_1 = re.compile(
            r'Interface \S+ attached to network \S+ not allowed on interface link'
        )

        assert len(message_list) >= 2

        for msg in message_list:
            msg = msg.to_dict()
            LOG.debug(msg)
            assert msg.get('error')
            assert any([
                regex.search(msg.get('message')),
                regex_1.search(msg.get('message'))
            ])