Python collections.Sequence() Examples

The following are code examples for showing how to use collections.Sequence(). They are from open source Python projects. You can vote up the examples you like or vote down the ones you don't like.

Example 1
Project: bigquerylayers   Author: smandaric   File: _helpers.py    GNU General Public License v3.0 6 votes vote down vote up
def to_query_parameters_list(parameters):
    """Convert a sequence of plain Python values into query parameters.

    :type parameters: Sequence[Any]
    :param parameters: Sequence of query parameter values.

    :rtype: List[google.cloud.bigquery.query._AbstractQueryParameter]
    :returns: A list of query parameters.

    :raises NotImplementedError: if any value is a mapping (STRUCT-like
        values have no positional-parameter representation).
    """
    def _convert(value):
        if isinstance(value, collections_abc.Mapping):
            raise NotImplementedError("STRUCT-like parameter values are not supported.")
        if array_like(value):
            return array_to_query_parameter(value)
        return scalar_to_query_parameter(value)

    return [_convert(value) for value in parameters]
Example 2
Project: bigquerylayers   Author: smandaric   File: _helpers.py    GNU General Public License v3.0 6 votes vote down vote up
def to_query_parameters(parameters):
    """Convert DB-API parameter values into query parameters.

    :type parameters: Mapping[str, Any] or Sequence[Any]
    :param parameters: A dictionary or sequence of query parameter values,
        or ``None`` for no parameters.

    :rtype: List[google.cloud.bigquery.query._AbstractQueryParameter]
    :returns: A list of query parameters.
    """
    if parameters is None:
        return []

    # Named parameters arrive as a mapping; positional ones as a sequence.
    converter = (
        to_query_parameters_dict
        if isinstance(parameters, collections_abc.Mapping)
        else to_query_parameters_list
    )
    return converter(parameters)
Example 3
Project: bigquerylayers   Author: smandaric   File: _helpers.py    GNU General Public License v3.0 6 votes vote down vote up
def array_like(value):
    """Tell whether ``value`` should be treated as an array.

    Sequences such as ``list`` and ``tuple`` qualify; strings, bytes-like
    values, and non-sequence iterables such as sets do not.

    Args:
        value (Any)

    Returns:
        bool: ``True`` if the value is considered array-like, ``False``
            otherwise.
    """
    stringlike = (six.text_type, six.binary_type, bytearray)
    if isinstance(value, stringlike):
        return False
    return isinstance(value, collections_abc.Sequence)
Example 4
Project: matchminer-engine   Author: dfci   File: cerberus.py    GNU Affero General Public License v3.0 6 votes vote down vote up
def __normalize_containers(self, mapping, schema):
        """Recursively apply normalization rules to container fields.

        Mapping values are normalized per ``keyschema``/``valueschema``
        and, when the field's rules can affect contained values, per
        ``schema``; sequence values are normalized per their ``schema``
        rule.  Strings are sequences too, but are skipped explicitly.
        """
        for field in mapping:
            # Fields without an entry in the schema carry no rules.
            if field not in schema:
                continue
            if isinstance(mapping[field], Mapping):
                if 'keyschema' in schema[field]:
                    self.__normalize_mapping_per_keyschema(
                        field, mapping, schema[field]['keyschema'])
                if 'valueschema' in schema[field]:
                    self.__normalize_mapping_per_valueschema(
                        field, mapping, schema[field]['valueschema'])
                # Only descend into the mapping when its rules can
                # actually change the contained values.
                if set(schema[field]) & set(('allow_unknown', 'purge_unknown',
                                             'schema')):
                    self.__normalize_mapping_per_schema(field, mapping, schema)
            elif isinstance(mapping[field], _str_type):
                # Strings would otherwise match the Sequence branch below.
                continue
            elif isinstance(mapping[field], Sequence) and \
                    'schema' in schema[field]:
                self.__normalize_sequence(field, mapping, schema)
Example 5
Project: matchminer-engine   Author: dfci   File: cerberus.py    GNU Affero General Public License v3.0 6 votes vote down vote up
def __validate_dependencies_mapping(self, dependencies, field):
        """Validate value-dependencies given as a mapping.

        Each key names a (possibly dotted) document path and maps to the
        value(s) that path must hold.  If not every dependency is
        satisfied, a DEPENDENCIES_FIELD_VALUE error is reported together
        with the offending values collected in ``info``.
        """
        validated_deps = 0
        for dep_name, dep_values in dependencies.items():
            # Normalize a single accepted value (a plain string is itself
            # a Sequence) to a one-element list.
            if (not isinstance(dep_values, Sequence) or
                    isinstance(dep_values, _str_type)):
                dep_values = [dep_values]
            context = self.document.copy()
            parts = dep_name.split('.')
            info = {}

            # Walk the dotted path, narrowing ``context`` one part at a
            # time; a missing part leaves the dependency unvalidated.
            for part in parts:
                if part in context:
                    context = context[part]
                    if context in dep_values:
                        validated_deps += 1
                    else:
                        info.update({dep_name: context})

        if validated_deps != len(dependencies):
            self._error(field, errors.DEPENDENCIES_FIELD_VALUE, info)
Example 6
Project: klembord   Author: OzymandiasTheGreat   File: __init__.py    MIT License 6 votes vote down vote up
def get(self, targets):
	"""Return clipboard contents for the requested formats/targets.

	Include the special target 'TARGETS' to also receive the list of
	formats/targets currently available.

	Args:
		targets (Sequence): A sequence of strings naming the
			formats/targets to fetch.
	Returns:
		Mapping: format/target -> bytes payload.  When 'TARGETS' was
			requested, its value is a tuple of strings naming the
			available formats/targets.  A :class:`dict` is used on
			Linux, an :class:`OrderedDict` on Windows.
	Raises:
		TypeError: if ``targets`` is not a Sequence.
	"""
	if not isinstance(targets, Sequence):
		raise TypeError('targets is not a Sequence')
	return self._interface.get(targets)
Example 7
Project: klembord   Author: OzymandiasTheGreat   File: __init__.py    MIT License 6 votes vote down vote up
def get(targets):
	"""Return clipboard contents for the requested formats/targets.

	Include the special target 'TARGETS' to also receive the list of
	formats/targets currently available.

	Args:
		targets (Sequence): A sequence of strings naming the
			formats/targets to fetch.
	Returns:
		Mapping: format/target -> bytes payload.  When 'TARGETS' was
			requested, its value is a tuple of strings naming the
			available formats/targets.  A :class:`dict` is used on
			Linux, an :class:`OrderedDict` on Windows.
	"""
	global SELECTION
	# Lazily create the module-wide Selection on first use and keep
	# reusing it for every later call.
	if SELECTION is None:
		SELECTION = Selection()
	return SELECTION.get(targets)
Example 8
Project: Attention-Gated-Networks   Author: ozan-oktay   File: aggregated_classifier.py    MIT License 6 votes vote down vote up
def aggregate_output(self):
        """Turn the network prediction(s) into a single decision.

        When ``self.predictions`` is a sequence of per-crop outputs they
        are softmaxed and combined according to ``self.aggregation``
        ('max', 'mean', 'weighted_mean' or 'idx'); otherwise a single
        softmax followed by a per-sample argmax is applied.
        """
        if not isinstance(self.predictions, collections.Sequence):
            # Single prediction: apply a softmax and return a segmentation map.
            self.logits = self.net.apply_argmax_softmax(self.predictions)
            self.pred = self.logits.data.max(1)
            return

        # Stack the per-prediction softmax outputs along a new leading axis.
        stacked = torch.cat(
            [self.net.apply_argmax_softmax(pred).unsqueeze(0)
             for pred in self.predictions], 0)
        rule = self.aggregation
        if rule == 'max':
            self.pred = stacked.data.max(0)[0].max(1)
        elif rule == 'mean':
            self.pred = stacked.data.mean(0).max(1)
        elif rule == 'weighted_mean':
            weighted = self.aggregation_weight.expand_as(stacked) * stacked
            self.pred = weighted.data.mean(0).max(1)
        elif rule == 'idx':
            self.pred = stacked[self.aggregation_param].data.max(1)
Example 9
Project: Attention-Gated-Networks   Author: ozan-oktay   File: myImageTransformations.py    MIT License 6 votes vote down vote up
def __call__(self, image):
        """Apply multiplicative Gaussian noise to ``image`` (H x W x C).

        ``self.sigma`` and ``self.mean`` may be plain numbers or
        sequences; a sequence is resolved to a concrete number via
        ``random_num_generator``.  The image is modified in place and
        also returned.
        """
        def resolve(setting):
            # Sequences describe a random parameter; scalars are final.
            if isinstance(setting, collections.Sequence):
                return random_num_generator(
                    setting, random_state=self.random_state)
            return setting

        # Resolve sigma first, then mean, to keep the RNG call order.
        sigma = resolve(self.sigma)
        mean = resolve(self.mean)
        row, col, ch = image.shape
        gauss = self.random_state.normal(mean, sigma, (row, col, ch))
        gauss = gauss.reshape(row, col, ch)
        # Multiplicative (speckle-style) noise: image * (1 + gauss).
        image += image * gauss
        return image
Example 10
Project: Attention-Gated-Networks   Author: ozan-oktay   File: myImageTransformations.py    MIT License 6 votes vote down vote up
def __call__(self, img):
        """Run ``self.transforms`` over ``img``.

        A sequence entry is applied element-wise: it must match the
        length of the (sequence) image group, and each callable member
        transforms the matching image while non-callables leave theirs
        unchanged.  ``None`` entries are skipped; any other entry type
        is an error.
        """
        for transform in self.transforms:
            if isinstance(transform, collections.Sequence):
                assert isinstance(img, collections.Sequence) and len(img) == len(
                    transform), "size of image group and transform group does not fit"
                img = [t(im) if callable(t) else im
                       for t, im in zip(transform, img)]
            elif callable(transform):
                img = transform(img)
            elif transform is None:
                continue
            else:
                raise Exception('unexpected type')
        return img
Example 11
Project: opencv_transforms   Author: jbohnslav   File: functional.py    MIT License 6 votes vote down vote up
def normalize(tensor, mean, std):
    """Normalize a tensor image channel-wise with mean and standard deviation.

    .. note::
        This transform acts in-place, i.e., it mutates the input tensor.

    See :class:`~torchvision.transforms.Normalize` for more details.

    Args:
        tensor (Tensor): Tensor image of size (C, H, W) to be normalized.
        mean (sequence): Sequence of per-channel means.
        std (sequence): Sequence of per-channel standard deviations.

    Returns:
        Tensor: The normalized tensor (the same object as the input).

    Raises:
        TypeError: if ``tensor`` is not a torch image.
    """
    if not _is_tensor_image(tensor):
        raise TypeError('tensor is not a torch image.')

    # Looping channel-by-channel beat broadcasting in benchmarks;
    # don't change without re-measuring.
    for channel, channel_mean, channel_std in zip(tensor, mean, std):
        channel.sub_(channel_mean).div_(channel_std)
    return tensor
Example 12
Project: ovsdbapp   Author: otherwiseguy   File: idlutils.py    Apache License 2.0 6 votes vote down vote up
def db_replace_record(obj):
    """Replace any api.Command objects with their results

    ``obj`` is returned unchanged unless it is (or contains) an
    api.Command.  Mutable containers are updated in place; an immutable
    sequence such as a tuple is rebuilt with the substitutions applied.
    """
    if isinstance(obj, collections.Mapping):
        # Substitute results for any Command values, in place.
        for key, val in six.iteritems(obj):
            if isinstance(val, api.Command):
                obj[key] = val.result
    elif (isinstance(obj, collections.Sequence)
          and not isinstance(obj, six.string_types)):
        for idx, val in enumerate(obj):
            if isinstance(val, api.Command):
                try:
                    obj[idx] = val.result
                except TypeError:
                    # NOTE(twilson) If someone passes a tuple, then just return
                    # a tuple with the Commands replaced with their results
                    return type(obj)(getattr(v, "result", v) for v in obj)
    elif isinstance(obj, api.Command):
        obj = obj.result
    return obj
Example 13
Project: cimon_controller   Author: SchweizerischeBundesbahnen   File: test_apiserveroutput.py    Apache License 2.0 6 votes vote down vote up
def __assert_all_in_original_recursive__(self, api_response_part, original_part):
        """Recursively assert the API response is contained in the original.

        Dicts must share keys (checked recursively), non-string sequences
        must contain every value (dicts inside a sequence are matched
        partially), and any other value must compare equal.
        """
        if isinstance(api_response_part, dict):
            self.assertIsInstance(original_part, dict)
            for key in api_response_part:
                self.assertIn(key, original_part)
                self.__assert_all_in_original_recursive__(
                    api_response_part[key], original_part[key])
            return
        if isinstance(api_response_part, collections.Sequence) \
                and not isinstance(api_response_part, str):
            self.assertIsInstance(original_part, collections.Sequence)
            self.assertNotIsInstance(original_part, str)
            for value in api_response_part:
                if isinstance(value, dict):
                    self.assertTrue(
                        self.__find_partial_dict_in_list__(value, original_part))
                else:
                    self.assertIn(value, original_part)
            return
        self.assertEqual(api_response_part, original_part)
Example 14
Project: threatdetectionservice   Author: flyballlabs   File: pyparsing.py    Apache License 2.0 6 votes vote down vote up
def __init__( self, exprs, savelist = False ):
        """Build a compound expression from ``exprs``.

        ``exprs`` may be a generator (drained to a list), a single string
        (wrapped in a Literal), a sequence (strings inside wrapped in
        Literals), or any other object (listed if iterable, else wrapped
        in a one-element list).  Python 2 era code: ``basestring`` does
        not exist on Python 3.
        """
        super(ParseExpression,self).__init__(savelist)
        # Generators can only be consumed once; materialize them first.
        if isinstance( exprs, _generatorType ):
            exprs = list(exprs)

        if isinstance( exprs, basestring ):
            self.exprs = [ Literal( exprs ) ]
        elif isinstance( exprs, collections.Sequence ):
            # if sequence of strings provided, wrap with Literal
            if all(isinstance(expr, basestring) for expr in exprs):
                exprs = map(Literal, exprs)
            self.exprs = list(exprs)
        else:
            try:
                self.exprs = list( exprs )
            except TypeError:
                self.exprs = [ exprs ]
        # NOTE(review): presumably lets the contained expressions do their
        # own preparsing -- confirm against ParserElement.
        self.callPreparse = False
Example 15
Project: threatdetectionservice   Author: flyballlabs   File: pyparsing.py    Apache License 2.0 6 votes vote down vote up
def __init__( self, exprs, savelist = False ):
        """Build a compound expression from ``exprs``.

        ``exprs`` may be a generator (drained to a list), a single string
        (wrapped in a Literal), a sequence (strings inside wrapped in
        Literals), or any other object (listed if iterable, else wrapped
        in a one-element list).  Python 2 era code: ``basestring`` does
        not exist on Python 3.
        """
        super(ParseExpression,self).__init__(savelist)
        # Generators can only be consumed once; materialize them first.
        if isinstance( exprs, _generatorType ):
            exprs = list(exprs)

        if isinstance( exprs, basestring ):
            self.exprs = [ Literal( exprs ) ]
        elif isinstance( exprs, collections.Sequence ):
            # if sequence of strings provided, wrap with Literal
            if all(isinstance(expr, basestring) for expr in exprs):
                exprs = map(Literal, exprs)
            self.exprs = list(exprs)
        else:
            try:
                self.exprs = list( exprs )
            except TypeError:
                self.exprs = [ exprs ]
        # NOTE(review): presumably lets the contained expressions do their
        # own preparsing -- confirm against ParserElement.
        self.callPreparse = False
Example 16
Project: vulscan   Author: vulscanteam   File: pyparsing.py    MIT License 6 votes vote down vote up
def __init__( self, exprs, savelist = False ):
        """Build a compound expression from ``exprs``.

        ``exprs`` may be a generator (drained to a list), a single string
        (wrapped in a Literal), a sequence (strings inside wrapped in
        Literals), or any other object (listed if iterable, else wrapped
        in a one-element list).  Python 2 era code: ``basestring`` does
        not exist on Python 3.
        """
        super(ParseExpression,self).__init__(savelist)
        # Generators can only be consumed once; materialize them first.
        if isinstance( exprs, _generatorType ):
            exprs = list(exprs)

        if isinstance( exprs, basestring ):
            self.exprs = [ Literal( exprs ) ]
        elif isinstance( exprs, collections.Sequence ):
            # if sequence of strings provided, wrap with Literal
            if all(isinstance(expr, basestring) for expr in exprs):
                exprs = map(Literal, exprs)
            self.exprs = list(exprs)
        else:
            try:
                self.exprs = list( exprs )
            except TypeError:
                self.exprs = [ exprs ]
        # NOTE(review): presumably lets the contained expressions do their
        # own preparsing -- confirm against ParserElement.
        self.callPreparse = False
Example 17
Project: Counterfactual-StoryRW   Author: qkaren   File: hierarchical_encoders.py    MIT License 6 votes vote down vote up
def flatten(x):
        """Flatten a cell state by concatenating nested states along the
        last dimension.

        :tf_main:`LSTMStateTuple <contrib/rnn/LSTMStateTuple>` states
        contribute only their hidden part ``h``.  This process is used by
        default when :attr:`medium` is not provided to :meth:`_build`.
        """
        if isinstance(x, LSTMStateTuple):
            return x.h
        if not isinstance(x, collections.Sequence):
            return x
        flattened = [HierarchicalRNNEncoder.flatten(v) for v in x]
        return tf.concat(flattened, -1)
Example 18
Project: speaksee   Author: aimagelab   File: dataset.py    BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def collate_fn(self):
        """Build a collate callable for batching samples of this dataset.

        The returned function groups raw samples per field, lets each
        field process its column, and flattens any resulting sequence
        containing tensors into the output list.  When only one value
        remains it is returned bare instead of in a list.
        """
        def collate(batch):
            # A single field receives the whole batch as one column.
            if len(self.fields) == 1:
                columns = [batch, ]
            else:
                columns = list(zip(*batch))

            tensors = []
            for field, column in zip(self.fields.values(), columns):
                processed = field.process(column)
                is_tensor_seq = (
                    isinstance(processed, collections.Sequence)
                    and any(isinstance(t, torch.Tensor) for t in processed)
                )
                if is_tensor_seq:
                    tensors.extend(processed)
                else:
                    tensors.append(processed)

            return tensors if len(tensors) > 1 else tensors[0]

        return collate
Example 19
Project: siamese-mask-rcnn_mmdetection   Author: phj128   File: utils.py    Apache License 2.0 6 votes vote down vote up
def to_tensor(data):
    """Convert objects of various python types to :obj:`torch.Tensor`.

    Supported types are: :class:`numpy.ndarray`, :class:`torch.Tensor`,
    non-string :class:`Sequence`, :class:`int` and :class:`float`.

    Raises:
        TypeError: for any unsupported input type.
    """
    # Order matters: tensors pass through untouched, arrays are wrapped
    # zero-copy, and scalar checks come after the sequence check.
    if isinstance(data, torch.Tensor):
        return data
    if isinstance(data, np.ndarray):
        return torch.from_numpy(data)
    if isinstance(data, Sequence) and not mmcv.is_str(data):
        return torch.tensor(data)
    if isinstance(data, int):
        return torch.LongTensor([data])
    if isinstance(data, float):
        return torch.FloatTensor([data])
    raise TypeError('type {} cannot be converted to tensor.'.format(
        type(data)))
Example 20
Project: EMANet   Author: XiaLiPKU   File: data_parallel.py    GNU General Public License v3.0 6 votes vote down vote up
def dict_gather(outputs, target_device, dim=0):
    """
    Gathers variables from different GPUs on a specified device
      (-1 means the CPU), with dictionary support.
    """
    def gather_map(outputs):
        # All per-GPU outputs share one structure; inspect the first.
        out = outputs[0]
        if isinstance(out, Variable):
            # MJY(20180330) HACK:: force nr_dims > 0
            if out.dim() == 0:
                outputs = [o.unsqueeze(0) for o in outputs]
            return Gather.apply(target_device, dim, *outputs)
        elif out is None:
            return None
        elif isinstance(out, collections.Mapping):
            # Gather each key across the per-GPU dicts, recursively.
            return {k: gather_map([o[k] for o in outputs]) for k in out}
        elif isinstance(out, collections.Sequence):
            # Rebuild the sequence type with element-wise gathered values.
            return type(out)(map(gather_map, zip(*outputs)))
        # NOTE(review): any other type falls through and yields None --
        # confirm that is intended.
    return gather_map(outputs)
Example 21
Project: cct   Author: awacha   File: pinholeconfiguration.py    BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def __init__(self, L1: Union[Sequence, SupportsFloat], L2: Union[Sequence, SupportsFloat], D1: float, D2: float,
                 ls: float, lbs: float, sd: float, mindist_l1: float = 0.0, mindist_l2: float = 0.0,
                 sealringwidth: float = 0.0, wavelength: float = 0.15418):
        """Store the pinhole-collimation geometry.

        ``L1``/``L2`` are collimation lengths given either as a single
        value or as a sequence of element lengths; scalars are wrapped
        into one-element lists.  ``D1``/``D2`` are pinhole diameters,
        stored as radii (D * 0.5e-3, i.e. a unit conversion -- presumably
        um to mm, TODO confirm).  The remaining arguments are kept
        verbatim.

        NOTE(review): ``collections.Iterable`` was removed in Python 3.10
        (moved to ``collections.abc``); this check breaks on modern
        interpreters -- confirm and update.
        """
        if not isinstance(L1, collections.Iterable):
            L1 = [L1]
        self.l1_elements = L1
        if not isinstance(L2, collections.Iterable):
            L2 = [L2]
        self.l2_elements = L2
        self.mindist_l1 = mindist_l1
        self.mindist_l2 = mindist_l2
        self.sealringwidth = sealringwidth
        # Diameter -> radius with a scale factor of 0.5e-3.
        self.r1 = D1 * 0.5e-3
        self.r2 = D2 * 0.5e-3
        self.ls = ls
        self.lbs = lbs
        self.sd = sd
        self.wavelength = wavelength
Example 22
Project: parsechain   Author: Suor   File: chains.py    BSD 2-Clause "Simplified" License 5 votes vote down vote up
def map(f):
        """Lift ``f`` into a function mapping it over a list of elements.

        A non-callable mapping/sequence is first turned into a combined
        callable via ``C.multi``; the returned function applies ``f`` to
        each element with ``lmap``.
        """
        if not callable(f) and isinstance(f, (Mapping, Sequence)):
            f = C.multi(f)

        def mapper(els):
            return lmap(f, els)

        return mapper
Example 23
Project: pyblish-win   Author: pyblish   File: test_fixers.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def test_operator_isSequenceType(self):
        """operator.isSequenceType(x) is rewritten to an isinstance check."""
        before = "operator.isSequenceType(x)"
        after = "import collections\nisinstance(x, collections.Sequence)"
        self.check(before, after)
Example 24
Project: pyblish-win   Author: pyblish   File: test_fixers.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def test_bare_operator_isSequenceType(self):
        """A bare isSequenceType call only warns and is left unchanged."""
        source = "isSequenceType(z)"
        warning = "You should use 'isinstance(z, collections.Sequence)' here."
        self.warns_unchanged(source, warning)
Example 25
Project: pyblish-win   Author: pyblish   File: factory.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def CheckSqliteRowAsSequence(self):
        """Check that a sqlite Row behaves like a sequence."""
        self.con.row_factory = sqlite.Row
        row = self.con.execute("select 1 as a, 2 as b").fetchone()

        # Reversal only works on proper sequences, and the Row must also
        # register with the Sequence ABC.
        expected = tuple(row)
        self.assertEqual(list(reversed(row)), list(reversed(expected)))
        self.assertIsInstance(row, Sequence)
Example 26
Project: pyblish-win   Author: pyblish   File: test_collections.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def test_Sequence(self):
        """Sequence ABC: membership and subclassing for builtin sequences.

        Python 2 era test: ``basestring`` and ``xrange`` do not exist on
        Python 3.
        """
        for sample in [tuple, list, str]:
            self.assertIsInstance(sample(), Sequence)
            self.assertTrue(issubclass(sample, Sequence))
        self.assertTrue(issubclass(basestring, Sequence))
        self.assertIsInstance(range(10), Sequence)
        self.assertTrue(issubclass(xrange, Sequence))
        self.assertTrue(issubclass(str, Sequence))
        # Every Sequence must provide these abstract methods.
        self.validate_abstract_methods(Sequence, '__contains__', '__iter__', '__len__',
            '__getitem__')
Example 27
Project: bigquerylayers   Author: smandaric   File: ordered_set.py    GNU General Public License v3.0 5 votes vote down vote up
def __eq__(self, other):
        """
        Returns true if the containers have the same items. If `other` is a
        Sequence, then order is checked, otherwise it is ignored.

        Example:
            >>> oset = OrderedSet([1, 3, 2])
            >>> oset == [1, 3, 2]
            True
            >>> oset == [1, 2, 3]
            False
            >>> oset == [2, 3]
            False
            >>> oset == OrderedSet([3, 2, 1])
            False
        """
        # In Python 2 deque is not a Sequence, so treat it as one for
        # consistent behavior with Python 3.
        if isinstance(other, (Sequence, deque)):
            # Ordered comparison: same elements in the same order.
            return list(self) == list(other)
        try:
            their_items = set(other)
        except TypeError:
            # If `other` can't be converted into a set, it's not equal.
            return False
        # Unordered comparison for everything else set-convertible.
        return set(self) == their_items
Example 28
Project: bigquerylayers   Author: smandaric   File: _parameterized.py    GNU General Public License v3.0 5 votes vote down vote up
def _ParameterDecorator(naming_type, testcases):
  """Implementation of the parameterization decorators.

  Args:
    naming_type: The naming type.
    testcases: Testcase parameters.

  Returns:
    A function for modifying the decorated object.
  """
  def _Apply(obj):
    # Classes get their test methods expanded in place; plain functions
    # are wrapped in an iterator yielding one test per parameter set.
    if isinstance(obj, type):
      _ModifyClass(
          obj,
          list(testcases) if not isinstance(testcases, collections_abc.Sequence)
          else testcases,
          naming_type)
      return obj
    else:
      return _ParameterizedTestIter(obj, testcases, naming_type)

  # A single non-string iterable argument holds the actual parameter
  # sets; unwrap it before applying.
  if _IsSingletonList(testcases):
    assert _NonStringIterable(testcases[0]), (
        'Single parameter argument must be a non-string iterable')
    testcases = testcases[0]

  return _Apply
Example 29
Project: bigquerylayers   Author: smandaric   File: client.py    GNU General Public License v3.0 5 votes vote down vote up
def update_dataset(self, dataset, fields, retry=DEFAULT_RETRY):
        """Change some fields of a dataset.

        Use ``fields`` to specify which fields to update. At least one field
        must be provided. If a field is listed in ``fields`` and is ``None`` in
        ``dataset``, it will be deleted.

        If ``dataset.etag`` is not ``None``, the update will only succeed
        if the dataset on the server has the same ETag.  Reading a dataset
        with ``get_dataset``, changing its fields, and passing it here
        therefore saves the changes only if the dataset was not modified
        since the read.

        Args:
            dataset (google.cloud.bigquery.dataset.Dataset):
                The dataset to update.
            fields (Sequence[str]):
                The properties of ``dataset`` to change (e.g. "friendly_name").
            retry (google.api_core.retry.Retry, optional):
                How to retry the RPC.

        Returns:
            google.cloud.bigquery.dataset.Dataset:
                The modified ``Dataset`` instance.
        """
        partial = dataset._build_resource(fields)
        # Send the ETag (when known) so the server rejects lost updates.
        headers = {"If-Match": dataset.etag} if dataset.etag is not None else None
        api_response = self._call_api(
            retry, method="PATCH", path=dataset.path, data=partial, headers=headers
        )
        return Dataset.from_api_repr(api_response)
Example 30
Project: bigquerylayers   Author: smandaric   File: client.py    GNU General Public License v3.0 5 votes vote down vote up
def update_model(self, model, fields, retry=DEFAULT_RETRY):
        """[Beta] Change some fields of a model.

        Use ``fields`` to specify which fields to update. At least one field
        must be provided. If a field is listed in ``fields`` and is ``None``
        in ``model``, the field value will be deleted.

        If ``model.etag`` is not ``None``, the update will only succeed if
        the model on the server has the same ETag.  Reading a model with
        ``get_model``, changing its fields, and passing it here therefore
        saves the changes only if the model was not modified since the
        read.

        Args:
            model (google.cloud.bigquery.model.Model): The model to update.
            fields (Sequence[str]):
                The fields of ``model`` to change, spelled as the Model
                properties (e.g. "friendly_name").
            retry (google.api_core.retry.Retry):
                (Optional) A description of how to retry the API call.

        Returns:
            google.cloud.bigquery.model.Model:
                The model resource returned from the API call.
        """
        partial = model._build_resource(fields)
        # Truthiness (not ``is not None``): an empty ETag is treated as absent.
        headers = {"If-Match": model.etag} if model.etag else None
        api_response = self._call_api(
            retry, method="PATCH", path=model.path, data=partial, headers=headers
        )
        return Model.from_api_repr(api_response)
Example 31
Project: bigquerylayers   Author: smandaric   File: _helpers.py    GNU General Public License v3.0 5 votes vote down vote up
def array_to_query_parameter(value, name=None):
    """Convert an array-like value into a query parameter.

    Args:
        value (Sequence[Any]): The elements of the array (should not be a
            string-like Sequence).
        name (Optional[str]): Name of the query parameter.

    Returns:
        A query parameter corresponding with the type and value of the plain
        Python object.

    Raises:
        :class:`~google.cloud.bigquery.dbapi.exceptions.ProgrammingError`
        if the type of array elements cannot be determined.
    """
    def _bad(template):
        # All failure modes report the parameter name the same way.
        return exceptions.ProgrammingError(template.format(name))

    if not array_like(value):
        raise _bad(
            "The value of parameter {} must be a sequence that is "
            "not string-like."
        )

    if not value:
        raise _bad(
            "Encountered an empty array-like value of parameter {}, cannot "
            "determine array elements type."
        )

    # The first element decides the array type; the backend reconciles
    # any type mismatches among the remaining elements.
    array_type = bigquery_scalar_type(value[0])
    if array_type is None:
        raise _bad(
            "Encountered unexpected first array element of parameter {}, "
            "cannot determine array elements type."
        )

    return bigquery.ArrayQueryParameter(name, array_type, value)
Example 32
Project: openhatch   Author: campbe13   File: functional.py    GNU Affero General Public License v3.0 5 votes vote down vote up
def maybe_list(l):
    """Return ``l`` unchanged when it already is a sequence, else ``[l]``."""
    return l if isinstance(l, Sequence) else [l]
Example 33
Project: matchminer-engine   Author: dfci   File: cerberus.py    GNU Affero General Public License v3.0 5 votes vote down vote up
def _validate_allowed(self, allowed_values, field, value):
        """ {'type': 'list'} """
        # Strings and ints are checked as single values; other sequences
        # are checked member-wise.
        if isinstance(value, (_str_type, int)):
            if value not in allowed_values:
                self._error(field, errors.UNALLOWED_VALUE, value)
        elif isinstance(value, Sequence):
            unallowed = set(value) - set(allowed_values)
            if unallowed:
                self._error(field, errors.UNALLOWED_VALUES, list(unallowed))
Example 34
Project: matchminer-engine   Author: dfci   File: cerberus.py    GNU Affero General Public License v3.0 5 votes vote down vote up
def _validate_dependencies(self, dependencies, field, value):
        """ {'type': ['dict', 'hashable', 'hashables']} """
        # A single field name behaves like a one-element sequence.
        if isinstance(dependencies, _str_type):
            dependencies = [dependencies]

        if isinstance(dependencies, Sequence):
            self.__validate_dependencies_sequence(dependencies, field)
        elif isinstance(dependencies, Mapping):
            self.__validate_dependencies_mapping(dependencies, field)

        error_node = self.document_error_tree.fetch_node_from(
            self.schema_path + (field, 'dependencies'))
        if error_node is not None:
            return True
Example 35
Project: matchminer-engine   Author: dfci   File: cerberus.py    GNU Affero General Public License v3.0 5 votes vote down vote up
def _validate_forbidden(self, forbidden_values, field, value):
        """ {'type': 'list'} """
        if isinstance(value, _str_type):
            if value in forbidden_values:
                self._error(field, errors.FORBIDDEN_VALUE, value)
        elif isinstance(value, Sequence):
            # Report every member that appears in the forbidden set.
            hits = set(value) & set(forbidden_values)
            if hits:
                self._error(field, errors.FORBIDDEN_VALUES, list(hits))
        elif isinstance(value, int):
            if value in forbidden_values:
                self._error(field, errors.FORBIDDEN_VALUE, value)
Example 36
Project: matchminer-engine   Author: dfci   File: cerberus.py    GNU Affero General Public License v3.0 5 votes vote down vote up
def _validate_type_list(self, value):
        # List-ish means: any Sequence that is not a string.
        if not isinstance(value, Sequence):
            return
        if isinstance(value, _str_type):
            return
        return True
Example 37
Project: unitframe   Author: daviskirk   File: unitframe.py    MIT License 5 votes vote down vote up
def units(self, units):
        """Normalize the ``units`` specification into a per-column Series.

        Accepts ``None`` (unit of 1), a mapping keyed by column name, an
        array-like aligned positionally with the columns, or a single
        unit applied to every column.
        """
        if units is None:
            units = 1
        elif isinstance(units, Mapping):
            # dict-like: look units up by column name
            units = [to_unit(units[col]) for col in self.columns]
        elif isinstance(units, (Sequence, np.ndarray, pd.Series)):
            # positional: lists, tuples and array-likes
            units = [to_unit(units[pos]) for pos, _ in enumerate(self.columns)]
        else:
            units = to_unit(units)
        self._units = pd.Series(units, index=self.columns, dtype=object)
Example 38
Project: uplink   Author: prkumar   File: typing_.py    MIT License 5 votes vote down vote up
def convert(self, value):
        """Convert ``value`` (or each of its elements) into a list."""
        if not isinstance(value, collections.Sequence):
            # TODO: Handle the case where the value is not an sequence.
            return [self._elem_converter(value)]
        return [self._elem_converter(elem) for elem in value]
Example 39
Project: uplink   Author: prkumar   File: typing_.py    MIT License 5 votes vote down vote up
def _base_converter(self, type_):
        """Map a type spec to a converter; returns None when unsupported."""
        if isinstance(type_, BaseTypeConverter.Builder):
            return type_.build()
        if self._check_typing(type_):
            origin = type_.__origin__
            if issubclass(origin, self.typing.Sequence):
                return ListConverter(*type_.__args__)
            if issubclass(origin, self.typing.Mapping):
                return DictConverter(*type_.__args__)
Example 40
Project: py-solc   Author: ethereum   File: types.py    MIT License 5 votes vote down vote up
def is_list_like(obj):
    """Return True for sequence objects (lists, tuples, ...) that are not strings."""
    # collections.Sequence was removed in Python 3.10; use collections.abc.
    from collections.abc import Sequence
    return not is_string(obj) and isinstance(obj, Sequence)
Example 41
Project: misp42splunk   Author: remg427   File: util.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def listify(value):
    """Coerce ``value`` into a list.

    Lists pass through unchanged, None becomes [], a string is wrapped,
    any other sequence is copied into a list, and everything else is
    wrapped in a single-element list.
    """
    # collections.Sequence was removed in Python 3.10; use collections.abc.
    from collections.abc import Sequence

    if isinstance(value, list):
        return value
    elif value is None:
        return []
    elif isinstance(value, string_type):
        return [value]
    elif isinstance(value, Sequence):
        return list(value)
    else:
        return [value]
Example 42
Project: misp42splunk   Author: remg427   File: compound.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def _coerce(self, value):
        """Accept list-like values; strings/mappings and non-iterables fail."""
        if isinstance(value, list):
            return value
        if isinstance(value, (string_type, Mapping)):  # unacceptable iterables
            pass
        elif isinstance(value, (Sequence, Iterable)):
            return value
        raise ConversionError('Could not interpret the value as a list')
Example 43
Project: misp42splunk   Author: remg427   File: sortedlist.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def __iter__(self):
        """
        Return an iterator over the Sequence.

        Iterating the Sequence while adding or deleting values may raise a
        `RuntimeError` or fail to iterate over all entries.
        """
        return chain.from_iterable(self._lists) 
Example 44
Project: misp42splunk   Author: remg427   File: sortedlist.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def __reversed__(self):
        """
        Return an iterator to traverse the Sequence in reverse.

        Iterating the Sequence while adding or deleting values may raise a
        `RuntimeError` or fail to iterate over all entries.
        """
        return chain.from_iterable(map(reversed, reversed(self._lists))) 
Example 45
Project: misp42splunk   Author: remg427   File: sortedlist.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def _make_cmp(self, seq_op, doc):
        "Make comparator method."
        def comparer(self, that):
            "Compare method for sorted list and sequence."
            # pylint: disable=protected-access
            if not isinstance(that, Sequence):
                return NotImplemented

            self_len = self._len
            len_that = len(that)

            if self_len != len_that:
                if seq_op is op.eq:
                    return False
                if seq_op is op.ne:
                    return True

            for alpha, beta in zip(self, that):
                if alpha != beta:
                    return seq_op(alpha, beta)

            return seq_op(self_len, len_that)

        comparer.__name__ = '__{0}__'.format(seq_op.__name__)
        doc_str = 'Return `True` if and only if Sequence is {0} `that`.'
        comparer.__doc__ = doc_str.format(doc)

        return comparer 
Example 46
Project: misp42splunk   Author: remg427   File: sortedlist.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def __reversed__(self):
        """
        Return an iterator to traverse the Sequence in reverse.

        Iterating the Sequence while adding or deleting values may raise a
        `RuntimeError` or fail to iterate over all entries.
        """
        return chain.from_iterable(map(reversed, reversed(self._lists))) 
Example 47
Project: misp42splunk   Author: remg427   File: sortedlist.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def _make_cmp(self, seq_op, doc):
        "Make comparator method."
        def comparer(self, that):
            "Compare method for sorted list and sequence."
            # pylint: disable=protected-access
            if not isinstance(that, Sequence):
                return NotImplemented

            self_len = self._len
            len_that = len(that)

            if self_len != len_that:
                if seq_op is op.eq:
                    return False
                if seq_op is op.ne:
                    return True

            for alpha, beta in zip(self, that):
                if alpha != beta:
                    return seq_op(alpha, beta)

            return seq_op(self_len, len_that)

        comparer.__name__ = '__{0}__'.format(seq_op.__name__)
        doc_str = 'Return `True` if and only if Sequence is {0} `that`.'
        comparer.__doc__ = doc_str.format(doc)

        return comparer 
Example 48
Project: Attention-Gated-Networks   Author: ozan-oktay   File: aggregated_classifier.py    MIT License 5 votes vote down vote up
def compute_loss(self):
        """Compute loss function. Iterate over multiple output.

        Accumulates ``lmda * criterion(prediction, target)`` over each
        weighted prediction, skipping zero weights, and stores the total
        in ``self.loss``.
        """
        # collections.Sequence was removed in Python 3.10; use collections.abc.
        from collections.abc import Sequence

        preds = self.predictions
        weights = self.weight
        if not isinstance(preds, Sequence):
            # A single output: treat it as a one-element list with weight 1.
            preds = [preds]
            weights = [1]

        loss = 0
        for lmda, prediction in zip(weights, preds):
            if lmda == 0:
                continue  # zero-weighted outputs contribute nothing
            loss += lmda * self.criterion(prediction, self.target)

        self.loss = loss
Example 49
Project: Attention-Gated-Networks   Author: ozan-oktay   File: myImageTransformations.py    MIT License 5 votes vote down vote up
def __call__(self, images):
        if isinstance(images, collections.Sequence) or isinstance(images, np.ndarray):
            assert all([isinstance(i, np.ndarray)
                        for i in images]), 'only numpy array is supported'
            shapes = [list(i.shape) for i in images]
            for s in shapes:
                s[self.axis] = None
            assert all([s == shapes[0] for s in shapes]
                       ), 'shapes must be the same except the merge axis'
            return np.concatenate(images, axis=self.axis)
        else:
            raise Exception("obj is not a sequence (list, tuple, etc)") 
Example 50
Project: Attention-Gated-Networks   Author: ozan-oktay   File: myImageTransformations.py    MIT License 5 votes vote down vote up
def __init__(self, *slices, **kwargs):
        assert isinstance(slices, collections.Sequence)
        slices_ = []
        for s in slices:
            if isinstance(s, collections.Sequence):
                slices_.append(slice(*s))
            else:
                slices_.append(s)
        assert all([isinstance(s, slice) for s in slices_]
                   ), 'slices must be consist of slice instances'
        self.slices = slices_
        self.axis = kwargs.get('axis', -1) 
Example 51
Project: Attention-Gated-Networks   Author: ozan-oktay   File: myImageTransformations.py    MIT License 5 votes vote down vote up
def __call__(self, image):
        """Apply an elastic transform with possibly randomized parameters.

        ``alpha``/``sigma`` given as sequences are sampled via
        random_num_generator; scalar values are used as-is.
        """
        # collections.Sequence was removed in Python 3.10; use collections.abc.
        from collections.abc import Sequence

        if isinstance(self.alpha, Sequence):
            alpha = random_num_generator(self.alpha)
        else:
            alpha = self.alpha
        if isinstance(self.sigma, Sequence):
            sigma = random_num_generator(self.sigma)
        else:
            sigma = self.sigma
        return elastic_transform(image, alpha=alpha, sigma=sigma)
Example 52
Project: Attention-Gated-Networks   Author: ozan-oktay   File: myImageTransformations.py    MIT License 5 votes vote down vote up
def __call__(self, image):
        if isinstance(self.sigma, collections.Sequence):
            sigma = random_num_generator(self.sigma, random_state=self.random_state)
        else:
            sigma = self.sigma
        if isinstance(self.mean, collections.Sequence):
            mean = random_num_generator(self.mean, random_state=self.random_state)
        else:
            mean = self.mean
        row, col, ch = image.shape
        gauss = self.random_state.normal(mean, sigma, (row, col, ch))
        gauss = gauss.reshape(row, col, ch)
        image += gauss
        return image 
Example 53
Project: Attention-Gated-Networks   Author: ozan-oktay   File: myImageTransformations.py    MIT License 5 votes vote down vote up
def __call__(self, image):
        if isinstance(self.sigma, collections.Sequence):
            sigma = random_num_generator(
                self.sigma, random_state=self.random_state)
        else:
            sigma = self.sigma
        image = gaussian_filter(image, sigma=(sigma, sigma, 0))
        return image 
Example 54
Project: Attention-Gated-Networks   Author: ozan-oktay   File: myImageTransformations.py    MIT License 5 votes vote down vote up
def __call__(self, image):
        """Add Poisson-sampled background noise derived from a blurred copy.

        ``sigma``/``peak`` given as sequences are sampled via
        random_num_generator; scalars are used as-is.
        """
        # collections.Sequence was removed in Python 3.10; use collections.abc.
        from collections.abc import Sequence

        if isinstance(self.sigma, Sequence):
            sigma = random_num_generator(
                self.sigma, random_state=self.random_state)
        else:
            sigma = self.sigma
        if isinstance(self.peak, Sequence):
            peak = random_num_generator(
                self.peak, random_state=self.random_state)
        else:
            peak = self.peak
        bg = gaussian_filter(image, sigma=(sigma, sigma, 0))
        bg = poisson_downsampling(
            bg, peak=peak, random_state=self.random_state)
        return image + bg
Example 55
Project: gym-tensorflow   Author: wookayin   File: tf_atari.py    Apache License 2.0 5 votes vote down vote up
def reset(self, indices=None, max_frames=None, name=None):
        '''Resets Atari instances with a random noop start (1-30) and set the maximum number of frames for the episode (default 100,000 * frameskip)
        '''
        # collections.Sequence was removed in Python 3.10; use collections.abc
        # (replaces the previous in-function `import collections`).
        from collections.abc import Sequence

        if indices is None:
            indices = np.arange(self.batch_size)
        with tf.variable_scope(name, default_name='AtariReset'):
            noops = tf.random_uniform(tf.shape(indices), minval=1, maxval=31, dtype=tf.int32)
            if max_frames is None:
                max_frames = tf.ones_like(indices, dtype=tf.int32) * (100000 * self.frameskip)
            if not isinstance(max_frames, Sequence):
                # Broadcast a scalar frame budget across the whole batch.
                max_frames = tf.ones_like(indices, dtype=tf.int32) * max_frames
            return gym_tensorflow_module.environment_reset(self.instances, indices, noops=noops, max_frames=max_frames)
Example 56
Project: opencv_transforms   Author: jbohnslav   File: transforms.py    MIT License 5 votes vote down vote up
def __init__(self, padding, fill=0, padding_mode='constant'):
        assert isinstance(padding, (numbers.Number, tuple, list))
        assert isinstance(fill, (numbers.Number, str, tuple))
        assert padding_mode in ['constant', 'edge', 'reflect', 'symmetric']
        if isinstance(padding, collections.Sequence) and len(padding) not in [2, 4]:
            raise ValueError("Padding must be an int or a 2, or 4 element tuple, not a " +
                             "{} element tuple".format(len(padding)))

        self.padding = padding
        self.fill = fill
        self.padding_mode = padding_mode 
Example 57
Project: ovsdbapp   Author: otherwiseguy   File: api.py    Apache License 2.0 5 votes vote down vote up
def val_to_py(val):
    """Convert a json ovsdb return value to native python object"""
    # collections.Sequence was removed in Python 3.10; use collections.abc.
    from collections.abc import Sequence

    # ovsdb encodes typed values as 2-element ["tag", payload] pairs.
    if isinstance(val, Sequence) and len(val) == 2:
        if val[0] == "uuid":
            return uuid.UUID(val[1])
        elif val[0] == "set":
            return [val_to_py(x) for x in val[1]]
        elif val[0] == "map":
            return {val_to_py(x): val_to_py(y) for x, y in val[1]}
    return val
Example 58
Project: RouteOptimization   Author: andre-le   File: _parameterized.py    MIT License 5 votes vote down vote up
def _ParameterDecorator(naming_type, testcases):
  """Implementation of the parameterization decorators.

  Args:
    naming_type: The naming type.
    testcases: Testcase parameters.

  Returns:
    A function for modifying the decorated object.
  """
  # collections.Sequence was removed in Python 3.10; use collections.abc.
  from collections.abc import Sequence

  def _Apply(obj):
    if isinstance(obj, type):
      _ModifyClass(
          obj,
          list(testcases) if not isinstance(testcases, Sequence)
          else testcases,
          naming_type)
      return obj
    else:
      return _ParameterizedTestIter(obj, testcases, naming_type)

  if _IsSingletonList(testcases):
    assert _NonStringIterable(testcases[0]), (
        'Single parameter argument must be a non-string iterable')
    testcases = testcases[0]

  return _Apply
Example 59
Project: hse-python-ml   Author: saygogoplz   File: test_range.py    MIT License 5 votes vote down vote up
def test_iterable(self):
        """Verify Range/RangeIterator register with the collections ABCs."""
        for klass, abc in ((RangeIterator, Iterator),
                           (Range, Iterable),
                           (Range, Sequence)):
            self.assertTrue(issubclass(klass, abc))

        rng = Range(2, 10, 5)
        self.assertTrue(isinstance(rng, Iterable))
        self.assertTrue(isinstance(rng, Sequence))
        self.assertFalse(isinstance(iter(rng), Generator))
        self.assertTrue(isinstance(iter(rng), Iterator))
        self.assertTrue(isinstance(RangeIterator(rng), Iterator))
        self.assertFalse(isinstance(RangeIterator(rng), Generator))
Example 60
Project: hse-python-ml   Author: saygogoplz   File: test_range.py    MIT License 5 votes vote down vote up
def test_iterable(self):
        """Check the iterator/sequence protocol support of Range."""
        self.assertTrue(issubclass(RangeIterator, Iterator))
        self.assertTrue(issubclass(Range, Iterable))
        self.assertTrue(issubclass(Range, Sequence))

        sample = Range(2, 10, 5)
        self.assertTrue(isinstance(sample, Iterable))
        self.assertTrue(isinstance(sample, Sequence))
        # iter() must yield a plain iterator, not a generator.
        self.assertFalse(isinstance(iter(sample), Generator))
        self.assertTrue(isinstance(iter(sample), Iterator))
        self.assertTrue(isinstance(RangeIterator(sample), Iterator))
        self.assertFalse(isinstance(RangeIterator(sample), Generator))
Example 61
Project: deoplete-asm   Author: deoplete-plugins   File: _parameterized.py    MIT License 5 votes vote down vote up
def _ParameterDecorator(naming_type, testcases):
  """Implementation of the parameterization decorators.

  Args:
    naming_type: The naming type.
    testcases: Testcase parameters.

  Returns:
    A function for modifying the decorated object.
  """
  # collections.Sequence was removed in Python 3.10; use collections.abc.
  from collections.abc import Sequence

  def _Apply(obj):
    if isinstance(obj, type):
      _ModifyClass(
          obj,
          list(testcases) if not isinstance(testcases, Sequence)
          else testcases,
          naming_type)
      return obj
    else:
      return _ParameterizedTestIter(obj, testcases, naming_type)

  if _IsSingletonList(testcases):
    assert _NonStringIterable(testcases[0]), (
        'Single parameter argument must be a non-string iterable')
    testcases = testcases[0]

  return _Apply
Example 62
Project: speaksee   Author: aimagelab   File: dataset.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def collate_fn(self):
        """Build a collate function that re-groups flattened value batches."""
        # collections.Sequence was removed in Python 3.10; use collections.abc.
        from collections.abc import Sequence

        def collate(batch):
            value_batch_flattened = list(itertools.chain(*batch))
            value_tensors_flattened = super(ValueDataset, self).collate_fn()(value_batch_flattened)

            # Offsets delimiting each original sub-batch in the flat list.
            lengths = [0, ] + list(itertools.accumulate([len(x) for x in batch]))
            if isinstance(value_tensors_flattened, Sequence) \
                    and any(isinstance(t, torch.Tensor) for t in value_tensors_flattened):
                value_tensors = [[vt[s:e] for (s, e) in zip(lengths[:-1], lengths[1:])] for vt in value_tensors_flattened]
            else:
                value_tensors = [value_tensors_flattened[s:e] for (s, e) in zip(lengths[:-1], lengths[1:])]

            return value_tensors
        return collate
Example 63
Project: UnsupervisedGeometryAwareRepresentationLearning   Author: hrhodin   File: _utils.py    GNU General Public License v3.0 5 votes vote down vote up
def convert_tensor(input_, device=None):
    """Recursively move tensors (possibly nested in dicts/lists) to ``device``.

    Strings pass through; anything else that is not a tensor, mapping, or
    sequence raises TypeError.
    """
    # collections.Mapping/Sequence were removed in Python 3.10; use collections.abc.
    from collections.abc import Mapping, Sequence

    if torch.is_tensor(input_):
        if device:
            input_ = input_.to(device=device)
        return input_
    elif isinstance(input_, string_classes):
        return input_
    elif isinstance(input_, Mapping):
        return {k: convert_tensor(sample, device=device) for k, sample in input_.items()}
    elif isinstance(input_, Sequence):
        return [convert_tensor(sample, device=device) for sample in input_]
    else:
        raise TypeError(("input must contain tensors, dicts or lists; found {}"
                         .format(type(input_))))
Example 64
Project: mxbox   Author: Lyken17   File: torchloader.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def default_collate(batch):
    """Puts each data field into a tensor with outer dimension batch size.

    Tensors/ndarrays are stacked, numbers become Long/Double tensors,
    mappings and sequences are collated recursively.
    """
    # collections.Mapping/Sequence were removed in Python 3.10; use collections.abc.
    from collections.abc import Mapping, Sequence

    if torch.is_tensor(batch[0]):
        out = None
        if _use_shared_memory:
            # If we're in a background process, concatenate directly into a
            # shared memory tensor to avoid an extra copy
            numel = sum([x.numel() for x in batch])
            storage = batch[0].storage()._new_shared(numel)
            out = batch[0].new(storage)
        return torch.stack(batch, 0, out=out)
    elif type(batch[0]).__module__ == 'numpy':
        elem = batch[0]
        if type(elem).__name__ == 'ndarray':
            return torch.stack([torch.from_numpy(b) for b in batch], 0)
        if elem.shape == ():  # scalars
            py_type = float if elem.dtype.name.startswith('float') else int
            return numpy_type_map[elem.dtype.name](list(map(py_type, batch)))
    elif isinstance(batch[0], int):
        return torch.LongTensor(batch)
    elif isinstance(batch[0], float):
        return torch.DoubleTensor(batch)
    elif isinstance(batch[0], string_classes):
        return batch
    elif isinstance(batch[0], Mapping):
        return {key: default_collate([d[key] for d in batch]) for key in batch[0]}
    elif isinstance(batch[0], Sequence):
        transposed = zip(*batch)
        return [default_collate(samples) for samples in transposed]

    raise TypeError(("batch must contain tensors, numbers, dicts or lists; found {}"
                     .format(type(batch[0]))))
Example 65
Project: tensorflow-u-net   Author: FelixGruen   File: segmentation.py    GNU General Public License v3.0 5 votes vote down vote up
def resize(source, desired_size):
    """
    Resizes the input to the desired_size.

    Parameters
    ----------
    source : iterable
        An iterable over a number of datapoints where each datapoint is a tuple of a list of inputs and a parameter dictionary.
    desired_size: array or list of length 2
        The height and width of the output.

    Returns
    -------
    gen : generator
        A generator that yields each transformed datapoint as a tuple of a list of inputs and a parameter dictionary.

    Raises
    ------
    TypeError
        If ``desired_size`` is neither a sequence nor a numpy array.
    """
    # collections.Sequence was removed in Python 3.10; use collections.abc.
    from collections.abc import Sequence

    if not isinstance(desired_size, Sequence) and not isinstance(desired_size, np.ndarray):
        # BUG FIX: the TypeError used to be constructed but never raised.
        raise TypeError("Desired size must be a sequence or array! Received: {}".format(type(desired_size)))

    # np.int was removed in NumPy 1.24; the builtin int is equivalent here.
    desired_size = np.asarray(desired_size, dtype=int)

    def transformation(input_tuple):
        inputs, parameters = input_tuple

        inputs = __resize(inputs, desired_size)
        # parameters["spacing"] = tuple(np.array(parameters["spacing"]) * (parameters["size"] / desired_size.astype(np.float)))
        parameters["size"] = tuple(desired_size)

        return (inputs, parameters)

    return helper.apply(source, transformation)
Example 66
Project: EMANet   Author: XiaLiPKU   File: th.py    GNU General Public License v3.0 5 votes vote down vote up
def as_variable(obj):
    """Recursively wrap tensors (nested in lists/dicts) as autograd Variables."""
    # collections.Sequence/Mapping were removed in Python 3.10; use collections.abc.
    from collections.abc import Mapping, Sequence

    if isinstance(obj, Variable):
        return obj
    if isinstance(obj, Sequence):
        return [as_variable(v) for v in obj]
    elif isinstance(obj, Mapping):
        return {k: as_variable(v) for k, v in obj.items()}
    else:
        return Variable(obj)
Example 67
Project: EMANet   Author: XiaLiPKU   File: th.py    GNU General Public License v3.0 5 votes vote down vote up
def as_numpy(obj):
    """Recursively convert tensors/Variables (nested in lists/dicts) to numpy."""
    # collections.Sequence/Mapping were removed in Python 3.10; use collections.abc.
    from collections.abc import Mapping, Sequence

    if isinstance(obj, Sequence):
        return [as_numpy(v) for v in obj]
    elif isinstance(obj, Mapping):
        return {k: as_numpy(v) for k, v in obj.items()}
    elif isinstance(obj, Variable):
        return obj.data.cpu().numpy()
    elif torch.is_tensor(obj):
        return obj.cpu().numpy()
    else:
        return np.array(obj)
Example 68
Project: EMANet   Author: XiaLiPKU   File: th.py    GNU General Public License v3.0 5 votes vote down vote up
def mark_volatile(obj):
    """Recursively flag tensors/Variables as no-grad (legacy volatile API)."""
    # collections.Mapping/Sequence were removed in Python 3.10; use collections.abc.
    from collections.abc import Mapping, Sequence

    if torch.is_tensor(obj):
        obj = Variable(obj)
    if isinstance(obj, Variable):
        obj.no_grad = True
        return obj
    elif isinstance(obj, Mapping):
        return {k: mark_volatile(o) for k, o in obj.items()}
    elif isinstance(obj, Sequence):
        return [mark_volatile(o) for o in obj]
    else:
        return obj
Example 69
Project: EMANet   Author: XiaLiPKU   File: dataloader.py    GNU General Public License v3.0 5 votes vote down vote up
def default_collate(batch):
    """Puts each data field into a tensor with outer dimension batch size.

    Stacks tensors and numeric ndarrays, converts numbers, passes strings
    through, and recurses into mappings and sequences.
    """
    # collections.Mapping/Sequence were removed in Python 3.10; use collections.abc.
    from collections.abc import Mapping, Sequence

    error_msg = "batch must contain tensors, numbers, dicts or lists; found {}"
    elem_type = type(batch[0])
    if torch.is_tensor(batch[0]):
        out = None
        if _use_shared_memory:
            # If we're in a background process, concatenate directly into a
            # shared memory tensor to avoid an extra copy
            numel = sum([x.numel() for x in batch])
            storage = batch[0].storage()._new_shared(numel)
            out = batch[0].new(storage)
        return torch.stack(batch, 0, out=out)
    elif elem_type.__module__ == 'numpy' and elem_type.__name__ != 'str_' \
            and elem_type.__name__ != 'string_':
        elem = batch[0]
        if elem_type.__name__ == 'ndarray':
            # array of string classes and object
            if re.search('[SaUO]', elem.dtype.str) is not None:
                raise TypeError(error_msg.format(elem.dtype))

            return torch.stack([torch.from_numpy(b) for b in batch], 0)
        if elem.shape == ():  # scalars
            py_type = float if elem.dtype.name.startswith('float') else int
            return numpy_type_map[elem.dtype.name](list(map(py_type, batch)))
    elif isinstance(batch[0], int_classes):
        return torch.LongTensor(batch)
    elif isinstance(batch[0], float):
        return torch.DoubleTensor(batch)
    elif isinstance(batch[0], string_classes):
        return batch
    elif isinstance(batch[0], Mapping):
        return {key: default_collate([d[key] for d in batch]) for key in batch[0]}
    elif isinstance(batch[0], Sequence):
        transposed = zip(*batch)
        return [default_collate(samples) for samples in transposed]

    raise TypeError((error_msg.format(type(batch[0]))))
Example 70
Project: EMANet   Author: XiaLiPKU   File: dataloader.py    GNU General Public License v3.0 5 votes vote down vote up
def pin_memory_batch(batch):
    """Recursively pin host memory for tensors nested in dicts/lists.

    Strings and unrecognized values are returned unchanged.
    """
    # collections.Mapping/Sequence were removed in Python 3.10; use collections.abc.
    from collections.abc import Mapping, Sequence

    if torch.is_tensor(batch):
        return batch.pin_memory()
    elif isinstance(batch, string_classes):
        return batch
    elif isinstance(batch, Mapping):
        return {k: pin_memory_batch(sample) for k, sample in batch.items()}
    elif isinstance(batch, Sequence):
        return [pin_memory_batch(sample) for sample in batch]
    else:
        return batch
Example 71
Project: cct   Author: awacha   File: pinholeconfiguration.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def l1(self) -> float:
        """Total l1 length.

        A sequence of elements is summed together with one seal ring per
        junction (len + 1) and the minimum l1 distance; a scalar
        ``l1_elements`` is returned unchanged.
        """
        # collections.Sequence was removed in Python 3.10; use collections.abc.
        from collections.abc import Sequence

        if isinstance(self.l1_elements, Sequence):
            return float(sum(self.l1_elements) +
                         self.sealringwidth * (1 + len(self.l1_elements)) +
                         self.mindist_l1)
        else:
            return self.l1_elements
Example 72
Project: cct   Author: awacha   File: pinholeconfiguration.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def l2(self) -> float:
        """Total l2 length.

        A sequence of elements is summed together with one seal ring per
        junction (len + 1) and the minimum l2 distance; a scalar
        ``l2_elements`` is returned unchanged.
        """
        # collections.Sequence was removed in Python 3.10; use collections.abc.
        from collections.abc import Sequence

        if isinstance(self.l2_elements, Sequence):
            return float(sum(self.l2_elements) +
                         self.sealringwidth * (1 + len(self.l2_elements)) +
                         self.mindist_l2)
        else:
            return self.l2_elements
Example 73
Project: reroils-data-legacy   Author: rero   File: api.py    GNU General Public License v2.0 4 votes vote down vote up
def find_by_holding(cls, **kwargs):
        """Find item versions based on their holdings information.

        Every given kwarg will be queried as a key-value pair in the items
        holding.

        :returns: List[(UUID, version_id)] with `version_id` as used by
                  `RecordMetadata.version_id`.
        """
        def _get_filter_clause(obj, key, value):
            val = obj[key].astext
            CASTS = {
                bool: lambda x: cast(x, BOOLEAN),
                int: lambda x: cast(x, INTEGER),
                datetime.date: lambda x: cast(x, DATE),
            }
            if (not isinstance(value, six.string_types) and
                    isinstance(value, collections.Sequence)):
                if len(value) == 2:
                    return CASTS[type(value[0])](val).between(*value)
                raise ValueError('Too few/many values for a range query. '
                                 'Range query requires two values.')
            return CASTS.get(type(value), lambda x: x)(val) == value

        RecordMetadataVersion = version_class(RecordMetadata)

        data = type_coerce(RecordMetadataVersion.json, JSONB)
        path = ('_circulation', 'holdings')

        subquery = db.session.query(
            RecordMetadataVersion.id.label('id'),
            RecordMetadataVersion.version_id.label('version_id'),
            func.json_array_elements(data[path]).label('obj')
        ).subquery()

        obj = type_coerce(subquery.c.obj, JSONB)

        query = db.session.query(
            RecordMetadataVersion.id,
            RecordMetadataVersion.version_id
        ).filter(
            RecordMetadataVersion.id == subquery.c.id,
            RecordMetadataVersion.version_id == subquery.c.version_id,
            *(_get_filter_clause(obj, k, v) for k, v in kwargs.items())
        )

        for result in query:
            yield result 
Example 74
Project: bigquerylayers   Author: smandaric   File: client.py    GNU General Public License v3.0 4 votes vote down vote up
def update_routine(self, routine, fields, retry=DEFAULT_RETRY):
        """[Beta] Change some fields of a routine.

        Use ``fields`` to specify which fields to update. At least one field
        must be provided. If a field is listed in ``fields`` and is ``None``
        in ``routine``, the field value will be deleted.

        .. warning::
           During beta, partial updates are not supported. You must provide
           all fields in the resource.

        If :attr:`~google.cloud.bigquery.routine.Routine.etag` is not
        ``None``, the update will only succeed if the resource on the server
        has the same ETag, so a read-modify-write through
        :func:`~google.cloud.bigquery.client.Client.get_routine` is safe
        against concurrent modification.

        Args:
            routine (google.cloud.bigquery.routine.Routine): The routine to update.
            fields (Sequence[str]):
                The fields of ``routine`` to change, spelled as the
                :class:`~google.cloud.bigquery.routine.Routine` properties
                (e.g. ``type_``).
            retry (google.api_core.retry.Retry):
                (Optional) A description of how to retry the API call.

        Returns:
            google.cloud.bigquery.routine.Routine:
                The routine resource returned from the API call.
        """
        partial = routine._build_resource(fields)
        # Optimistic concurrency: only update if the server-side ETag matches.
        headers = {"If-Match": routine.etag} if routine.etag else None

        # TODO: remove when routines update supports partial requests.
        partial["routineReference"] = routine.reference.to_api_repr()

        api_response = self._call_api(
            retry, method="PUT", path=routine.path, data=partial, headers=headers
        )
        return Routine.from_api_repr(api_response)
Example 75
Project: bigquerylayers   Author: smandaric   File: client.py    GNU General Public License v3.0 4 votes vote down vote up
def insert_rows_from_dataframe(
        self, table, dataframe, selected_fields=None, chunk_size=500, **kwargs
    ):
        """Insert rows into a table from a dataframe via the streaming API.

        Args:
            table (Union[ \
                :class:`~google.cloud.bigquery.table.Table`, \
                :class:`~google.cloud.bigquery.table.TableReference`, \
                str, \
            ]):
                The destination table for the row data, or a reference to it.
            dataframe (pandas.DataFrame):
                A :class:`~pandas.DataFrame` containing the data to load.
            selected_fields (Sequence[ \
                :class:`~google.cloud.bigquery.schema.SchemaField`, \
            ]):
                The fields to return. Required if ``table`` is a
                :class:`~google.cloud.bigquery.table.TableReference`.
            chunk_size (int):
                The number of rows to stream in a single chunk. Must be positive.
            kwargs (dict):
                Keyword arguments to
                :meth:`~google.cloud.bigquery.client.Client.insert_rows_json`.

        Returns:
            Sequence[Sequence[Mappings]]:
                A list with insert errors for each insert chunk. Each element
                is a list containing one mapping per row with insert errors:
                the "index" key identifies the row, and the "errors" key
                contains a list of the mappings describing one or more problems
                with the row.

        Raises:
            ValueError: if table's schema is not set
        """
        insert_results = []

        chunk_count = int(math.ceil(len(dataframe) / chunk_size))
        # The builtin zip is already lazy on Python 3; six.moves.zip was a
        # Python 2 compatibility leftover.
        rows_iter = (
            dict(zip(dataframe.columns, row))
            for row in dataframe.itertuples(index=False, name=None)
        )

        for _ in range(chunk_count):
            rows_chunk = itertools.islice(rows_iter, chunk_size)
            result = self.insert_rows(table, rows_chunk, selected_fields, **kwargs)
            insert_results.append(result)

        return insert_results
Example 76
Project: wise_ils   Author: ElementAI   File: utils.py    Apache License 2.0 4 votes vote down vote up
def collate_fn_0_4(batch, level=0):
    r"""Puts each data field into a tensor with outer dimension batch size.

    Recursively collates a batch of samples: tensors are stacked, numpy
    arrays converted and stacked, numbers turned into tensors, and dicts /
    sequences collated element-wise.

    Args:
        batch: non-empty list of samples (tensors, numpy arrays, numbers,
            strings, ``BoxList`` objects, dicts or sequences thereof).
        level (int): recursion depth; lists nested at ``level == 1`` are
            returned unchanged instead of being transposed.

    Returns:
        The collated batch; the concrete type mirrors the element type.

    Raises:
        TypeError: if the element type is not supported, or for numpy
            arrays of strings/objects.
    """
    error_msg = "batch must contain tensors, numbers, dicts or lists; found {}"
    elem_type = type(batch[0])
    # 3-D float32 tensors are treated as CHW images and padded/stacked
    # into an image list (size divisible by 32) — presumably for detection
    # models; `to_image_list` is defined elsewhere in this project.
    if (isinstance(batch[0], torch.Tensor) and
            batch[0].ndimension() == 3 and
            batch[0].dtype == torch.float32):
        return to_image_list(batch, 32)
    elif isinstance(batch[0], torch.Tensor):
        return torch.stack(batch, 0)
    elif elem_type.__module__ == 'numpy' and elem_type.__name__ != 'str_' \
            and elem_type.__name__ != 'string_':
        elem = batch[0]
        if elem_type.__name__ == 'ndarray':
            # Reject arrays of string classes and objects.
            if re.search('[SaUO]', elem.dtype.str) is not None:
                raise TypeError(error_msg.format(elem.dtype))
            return torch.stack([torch.from_numpy(b) for b in batch], 0)
        if elem.shape == ():  # numpy scalars
            py_type = float if elem.dtype.name.startswith('float') else int
            return numpy_type_map[elem.dtype.name](list(map(py_type, batch)))
    elif isinstance(batch[0], int_classes):
        return torch.LongTensor(batch)
    elif batch[0] is None:
        return batch
    elif isinstance(batch[0], list) and level == 1:
        # Lists one level down (e.g. dict values) are kept as-is.
        return batch
    elif isinstance(batch[0], float):
        return torch.DoubleTensor(batch)
    elif isinstance(batch[0], string_classes):
        return batch
    elif isinstance(batch[0], BoxList):
        return batch
    # NOTE: `collections.Mapping`/`collections.Sequence` were removed in
    # Python 3.10; the abc module holds the canonical ABCs.
    elif isinstance(batch[0], collections.abc.Mapping):
        return {key: collate_fn_0_4([d[key] for d in batch], level=level + 1)
                for key in batch[0]}
    elif isinstance(batch[0], collections.abc.Sequence):
        transposed = zip(*batch)
        return [collate_fn_0_4(samples) for samples in transposed]

    raise TypeError(error_msg.format(type(batch[0])))

# =============================================
# dataset utils 
Example 77
Project: python-esppy   Author: sassoftware   File: base.py    Apache License 2.0 4 votes vote down vote up
def param_iter(params):
    '''
    Iterate (cyclically, forever) over all combinations of parameters

    Parameters
    ----------
    params : dict or list of dicts
        The sets of parameters.  In the dict form, sequence-valued entries
        are iterated positionally while scalar entries are broadcast to
        every yielded dict.

    Examples
    --------
    >>> for item in param_iter(dict(a=1, b=['x', 'y', 'z'], c=100)):
    ...      print(item)
    ...      if item['b'] == 'z':
    ...          break
    {'a': 1, 'b': 'x', 'c': 100}
    {'a': 1, 'b': 'y', 'c': 100}
    {'a': 1, 'b': 'z', 'c': 100}

    >>> for item in param_iter([dict(a=1, b='x', c=100),
    ...                         dict(a=1, b='y'),
    ...                         dict(a=2, c=200)]):
    ...    print(item)
    ...    if item['a'] == 2:
    ...        break
    {'a': 1, 'b': 'x', 'c': 100}
    {'a': 1, 'b': 'y'}
    {'a': 2, 'c': 200}

    Yields
    ------
    dict

    '''
    # The outer loop never terminates: callers are expected to `break`
    # (or islice) once they have consumed enough combinations.
    while True:
        if not params:
            yield {}
        # `collections.Mapping`/`collections.Sequence` were removed in
        # Python 3.10; use the collections.abc ABCs instead.
        elif isinstance(params, collections.abc.Mapping):
            keys, values = zip(*params.items())
            values = list(values)
            # Number of combinations = length of the longest sequence value.
            n_items = 1
            for val in values:
                if isinstance(val, (tuple, list, set)):
                    n_items = max(n_items, len(val))
            # Broadcast scalars (and strings, which are technically
            # Sequences) so every value is indexable up to n_items.
            for i, val in enumerate(values):
                if isinstance(val, str) or \
                        not isinstance(val, collections.abc.Sequence):
                    values[i] = [val] * n_items
            for i in range(n_items):
                out = {}
                for key, value in zip(keys, values):
                    out[key] = value[i]
                yield out
        else:
            # List-of-dicts form: yield shallow copies so callers can
            # mutate results without corrupting the source.
            for value in params:
                yield copy.copy(value)
Example 78
Project: opencv_transforms   Author: jbohnslav   File: functional.py    MIT License 4 votes vote down vote up
def pad(img, padding, fill=0, padding_mode='constant'):
    r"""Pad the given numpy ndarray on all sides with specified padding mode and fill value.
    Args:
        img (numpy ndarray): image to be padded.
        padding (int or tuple): Padding on each border. If a single int is provided this
            is used to pad all borders. If tuple of length 2 is provided this is the padding
            on left/right and top/bottom respectively. If a tuple of length 4 is provided
            this is the padding for the left, top, right and bottom borders
            respectively.
        fill: Pixel fill value for constant fill. Default is 0. If a tuple of
            length 3, it is used to fill R, G, B channels respectively.
            This value is only used when the padding_mode is constant
        padding_mode: Type of padding. Should be: constant, edge, reflect or symmetric. Default is constant.
            - constant: pads with a constant value, this value is specified with fill
            - edge: pads with the last value on the edge of the image
            - reflect: pads with reflection of image (without repeating the last value on the edge)
                       padding [1, 2, 3, 4] with 2 elements on both sides in reflect mode
                       will result in [3, 2, 1, 2, 3, 4, 3, 2]
            - symmetric: pads with reflection of image (repeating the last value on the edge)
                         padding [1, 2, 3, 4] with 2 elements on both sides in symmetric mode
                         will result in [2, 1, 1, 2, 3, 4, 4, 3]
    Returns:
        Numpy image: padded image.
    """
    # --- argument validation -------------------------------------------
    if not _is_numpy_image(img):
        raise TypeError('img should be numpy ndarray. Got {}'.format(type(img)))
    if not isinstance(padding, (numbers.Number, tuple, list)):
        raise TypeError('Got inappropriate padding arg')
    if not isinstance(fill, (numbers.Number, str, tuple)):
        raise TypeError('Got inappropriate fill arg')
    if not isinstance(padding_mode, str):
        raise TypeError('Got inappropriate padding_mode arg')
    # `collections.Sequence` was removed in Python 3.10; the ABC lives in
    # collections.abc.
    if isinstance(padding, collections.abc.Sequence) and len(padding) not in [2, 4]:
        raise ValueError("Padding must be an int or a 2, or 4 element tuple, not a " +
                         "{} element tuple".format(len(padding)))

    assert padding_mode in ['constant', 'edge', 'reflect', 'symmetric'], \
        'Padding mode should be either constant, edge, reflect or symmetric'

    # --- normalize padding to per-side amounts -------------------------
    # NOTE(review): a non-int Number (e.g. float) passes validation above
    # but matches none of these branches, leaving pad_left undefined.
    if isinstance(padding, int):
        pad_left = pad_right = pad_top = pad_bottom = padding
    elif isinstance(padding, collections.abc.Sequence) and len(padding) == 2:
        pad_left = pad_right = padding[0]
        pad_top = pad_bottom = padding[1]
    elif isinstance(padding, collections.abc.Sequence) and len(padding) == 4:
        pad_left = padding[0]
        pad_top = padding[1]
        pad_right = padding[2]
        pad_bottom = padding[3]
    # Single-channel images lose their channel axis through OpenCV, so it
    # is restored with np.newaxis.
    if img.shape[2] == 1:
        return(cv2.copyMakeBorder(img, top=pad_top, bottom=pad_bottom, left=pad_left, right=pad_right,
                                 borderType=_cv2_pad_to_str[padding_mode], value=fill)[:,:,np.newaxis])
    else:
        return(cv2.copyMakeBorder(img, top=pad_top, bottom=pad_bottom, left=pad_left, right=pad_right,
                                     borderType=_cv2_pad_to_str[padding_mode], value=fill))
Example 79
Project: UnsupervisedGeometryAwareRepresentationLearning   Author: hrhodin   File: datasets.py    GNU General Public License v3.0 4 votes vote down vote up
def default_collate_with_string(batch):
    """Puts each data field into a tensor with outer dimension batch size.

    Variant of the default PyTorch collate that passes lists of strings
    through unchanged (stacking string ndarrays with numpy instead of
    rejecting them).

    Args:
        batch: non-empty list of samples (tensors, numpy arrays/scalars,
            ints, floats, strings, or dicts/sequences thereof).

    Returns:
        The collated batch; the concrete type mirrors the element type.

    Raises:
        TypeError: if the element type is not supported.
    """
    _use_shared_memory = False
    numpy_type_map = {
        'float64': torch.DoubleTensor,
        'float32': torch.FloatTensor,
        'float16': torch.HalfTensor,
        'int64': torch.LongTensor,
        'int32': torch.IntTensor,
        'int16': torch.ShortTensor,
        'int8': torch.CharTensor,
        'uint8': torch.ByteTensor,
    }
    string_classes = (str, bytes)
    if torch.is_tensor(batch[0]):
        out = None
        if _use_shared_memory:
            # If we're in a background process, concatenate directly into a
            # shared memory tensor to avoid an extra copy
            numel = sum([x.numel() for x in batch])
            storage = batch[0].storage()._new_shared(numel)
            out = batch[0].new(storage)
        return torch.stack(batch, 0, out=out)
    elif type(batch[0]).__module__ == 'numpy':
        elem = batch[0]
        if type(elem).__name__ == 'ndarray':
            # String arrays cannot become torch tensors; stack with numpy.
            if elem.dtype.kind in {'U', 'S'}:
                return np.stack(batch, 0)
            else:
                return torch.stack([torch.from_numpy(b) for b in batch], 0)
        if elem.shape == ():  # numpy scalars
            py_type = float if elem.dtype.name.startswith('float') else int
            return numpy_type_map[elem.dtype.name](list(map(py_type, batch)))
    elif isinstance(batch[0], int):
        return torch.LongTensor(batch)
    elif isinstance(batch[0], float):
        return torch.FloatTensor(batch)
    elif isinstance(batch[0], string_classes):
        return batch
    # `collections.Mapping`/`collections.Sequence` were removed in
    # Python 3.10; the canonical ABCs live in collections.abc.
    elif isinstance(batch[0], collections.abc.Mapping):
        return {key: default_collate_with_string([d[key] for d in batch])
                for key in batch[0]}
    elif isinstance(batch[0], collections.abc.Sequence):
        transposed = zip(*batch)
        return [default_collate_with_string(samples) for samples in transposed]

    raise TypeError(("batch must contain tensors, numbers, dicts or lists; found {}"
                     .format(type(batch[0]))))
Example 80
Project: DeepRibo   Author: Biobix   File: functions.py    GNU General Public License v3.0 4 votes vote down vote up
def defaultCollate(batch):
    '''Puts each data field into a tensor with outer dimension batch size'
    code copied from
    https://pytorch.org/docs/master/_modules/torch/utils/data/dataloader.html#DataLoader
    and tweaked for personal use'''

    error_msg = 'batch must contain tensors, numbers, dicts or lists; found {}'
    _use_shared_memory = True
    string_classes = (str, bytes)
    int_classes = int

    elem_type = type(batch[0])
    if torch.is_tensor(batch[0]):
        pad = False
        out = None

        # if not np.all([batch[0].shape == tensor.shape for tensor in batch]):
        if batch[0].shape[0] != 4:
            pad = True
            batch_lens = np.sort([b.shape[0] for b in batch])[::-1].copy()
            sort_order = np.argsort([b.shape[0] for b in batch])[::-1].copy()
            batch = pad_sequence([batch[idx] for idx in sort_order])

            batch.unsqueeze_(2).contiguous()

        if _use_shared_memory:
            # If we're in a background process, concatenate directly into a
            # shared memory tensor to avoid an extra copy
            numel = sum([x.numel() for x in batch])
            storage = batch[0].storage()._new_shared(numel)
            out = batch[0].new(storage)

        if pad:
            # return torch.stack(batch, dim=0, out=out),
            # torch.from_numpy(batch_lens)
            return (batch, batch_lens, sort_order)
        else:
            return torch.stack(batch, dim=0, out=out)
    elif elem_type.__module__ == 'numpy' and elem_type.__name__ != 'str_' \
            and elem_type.__name__ != 'string_':
        elem = batch[0]
        if elem_type.__name__ == 'ndarray':
            # array of string classes and object
            if re.search('[SaUO]', elem.dtype.str) is not None:
                raise TypeError(error_msg.format(elem.dtype))

            return torch.stack([torch.from_numpy(b) for b in batch], 0)
        if elem.shape == ():  # scalars
            py_type = float if elem.dtype.name.startswith('float') else int
            return numpy_type_map[elem.dtype.name](list(map(py_type, batch)))
    elif isinstance(batch[0], int_classes):
        return torch.LongTensor(batch)
    elif isinstance(batch[0], float):
        return torch.DoubleTensor(batch)
    elif isinstance(batch[0], string_classes):
        return batch
    elif isinstance(batch[0], collections.Mapping):
        return {key: defaultCollate([d[key] for d in batch]) for key in batch[0]}
    elif isinstance(batch[0], collections.Sequence):
        transposed = zip(*batch)
        return [defaultCollate(samples) for samples in transposed]