Python numpy.where() Examples

The following are 30 code examples of numpy.where(), drawn from open-source projects. Each example notes the source file, project, and license it comes from. You may also want to check out the other available functions and classes of the numpy module.
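For reference, numpy.where has two common forms: called with a single boolean array it returns a tuple of index arrays (one per dimension) locating the True entries, while the three-argument form selects elementwise between two alternatives. A minimal sketch:

import numpy as np

a = np.array([3, -1, 4, -1, 5])

# One-argument form: a tuple of index arrays, one per dimension.
idx = np.where(a < 0)            # (array([1, 3]),)

# Three-argument form: elementwise selection between two alternatives.
clipped = np.where(a < 0, 0, a)  # array([3, 0, 4, 0, 5])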
Example #1
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def splrep(coordinates, t, order, weights, smoothing, periodic):
        from scipy import interpolate
        (x,y) = coordinates
        # we need to skip anything where t[i] and t[i+1] are too close
        wh = np.where(np.isclose(np.diff(t), 0))[0]
        if len(wh) > 0:
            (t,x,y) = [np.array(u) for u in (t,x,y)]
            ii = np.arange(len(t))
            for i in reversed(wh):
                ii[i+1:-1] = ii[i+2:]
                for u in (t,x,y):
                    u[i] = np.mean(u[i:i+2])
            ii = ii[:-len(wh)]
            (t,x,y) = [u[ii] for u in (t,x,y)]
        xtck = interpolate.splrep(t, x, k=order, s=smoothing, w=weights, per=periodic)
        ytck = interpolate.splrep(t, y, k=order, s=smoothing, w=weights, per=periodic)
        return tuple([tuple([pimms.imm_array(u) for u in tck])
                      for tck in (xtck,ytck)]) 
Example #2
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def dataframe_select(df, *cols, **filters):
    '''
    dataframe_select(df, k1=v1, k2=v2...) yields df restricted to the rows in which the column
      named by each given key (k1, k2, etc.) matches the associated value.
    dataframe_select(df, col1, col2...) selects the given columns.
    dataframe_select(df, col1, col2..., k1=v1, k2=v2...) selects both.
    
    If a value is a tuple/list of 2 elements, then it is considered a range where cells must fall
    between the values. If value is a tuple/list of more than 2 elements or is a set of any length
    then it is a list of values, any one of which can match the cell.
    '''
    ii = np.ones(len(df), dtype='bool')
    for (k,v) in six.iteritems(filters):
        vals = df[k].values
        if   pimms.is_set(v):                    jj = np.isin(vals, list(v))
        elif pimms.is_vector(v) and len(v) == 2: jj = (v[0] <= vals) & (vals < v[1])
        elif pimms.is_vector(v):                 jj = np.isin(vals, list(v))
        else:                                    jj = (vals == v)
        ii = np.logical_and(ii, jj)
    if len(ii) != np.sum(ii): df = df.loc[ii]
    if len(cols) > 0: df = df[list(cols)]
    return df 
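The filtering above can be sketched with plain pandas and numpy, without the pimms helpers: build one boolean mask per filter (sets and long sequences via numpy.isin, a 2-element pair as a half-open range, anything else as an equality test) and combine the masks with logical AND. The column names and values below are made up for illustration.

import numpy as np
import pandas as pd

df = pd.DataFrame({'label': ['a', 'b', 'a', 'c'], 'score': [0.1, 0.7, 0.4, 0.9]})

mask = np.ones(len(df), dtype=bool)
mask &= np.isin(df['label'].values, ['a', 'b'])                    # membership filter
mask &= (0.0 <= df['score'].values) & (df['score'].values < 0.8)   # half-open range filter
subset = df.loc[mask, ['label', 'score']]                          # rows 0, 1 and 2 survive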
Example #3
Source File: group_sampler.py    From mmdetection with Apache License 2.0
def __iter__(self):
        indices = []
        for i, size in enumerate(self.group_sizes):
            if size == 0:
                continue
            indice = np.where(self.flag == i)[0]
            assert len(indice) == size
            np.random.shuffle(indice)
            num_extra = int(np.ceil(size / self.samples_per_gpu)
                            ) * self.samples_per_gpu - len(indice)
            indice = np.concatenate(
                [indice, np.random.choice(indice, num_extra)])
            indices.append(indice)
        indices = np.concatenate(indices)
        indices = [
            indices[i * self.samples_per_gpu:(i + 1) * self.samples_per_gpu]
            for i in np.random.permutation(
                range(len(indices) // self.samples_per_gpu))
        ]
        indices = np.concatenate(indices)
        indices = indices.astype(np.int64).tolist()
        assert len(indices) == self.num_samples
        return iter(indices) 
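The core of this sampler is per-group index selection via np.where followed by padding each group to a multiple of the per-GPU batch size. A standalone sketch of that step, with flag and samples_per_gpu standing in for the sampler's attributes:

import numpy as np

flag = np.array([0, 1, 0, 0, 1, 1, 0])   # group id of each sample
samples_per_gpu = 4

indices = []
for g in np.unique(flag):
    idx = np.where(flag == g)[0]                               # indices belonging to group g
    np.random.shuffle(idx)
    extra = int(np.ceil(len(idx) / samples_per_gpu)) * samples_per_gpu - len(idx)
    idx = np.concatenate([idx, np.random.choice(idx, extra)])  # pad to a full batch
    indices.append(idx)
indices = np.concatenate(indices)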
Example #4
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def to_potential(f):
    '''
    to_potential(f) yields f if f is a potential function; if f is not, but f can be converted to
      a potential function, that conversion is performed then the result is yielded.
    to_potential(Ellipsis) yields a potential function whose output is simply its input (i.e., the
      identity function).
    to_potential(None) is equivalent to to_potential(0).

    The following can be converted into potential functions:
      * Anything for which pimms.is_array(x, 'number') yields True (i.e., arrays of constants).
      * Any tuple (g, h) where g(x) yields a potential value and h(x) yields a jacobian matrix for
        the parameter vector x.
    '''
    if   is_potential(f): return f
    elif f is Ellipsis:   return identity
    elif pimms.is_array(f, 'number'): return const_potential(f)
    elif isinstance(f, tuple) and len(f) == 2: return PotentialLambda(f[0], f[1])
    else: raise ValueError('Could not convert object of type %s to potential function' % type(f)) 
Example #5
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def signed_face_areas(faces, axis=1):
    '''
    signed_face_areas(faces) yields a potential function f(x) that calculates the signed area of
      each face represented by the simplices matrix faces.

    If faces is None, then the parameters must arrive in the form of a flattened (n x 3 x 2) matrix
    where n is the number of triangles. Otherwise, the faces matrix must be either (n x 3) or (n x 3
    x s); if the former, each row must list the vertex indices for the faces where the vertex matrix
    is presumed to be shaped (V x 2). Alternately, faces may be a full (n x 3 x 2) simplex array of
    the indices into the parameters.

    The optional argument axis (default: 1) may be set to 0 if the faces argument is a matrix but
    the coordinate matrix will be (2 x V) instead of (V x 2).
    '''
    faces = np.asarray(faces)
    if len(faces.shape) == 2:
        if faces.shape[1] != 3: faces = faces.T
        n = 2 * (np.max(faces) + 1)
        if axis == 0: tmp = np.reshape(np.arange(n), (2,-1)).T
        else:         tmp = np.reshape(np.arange(n), (-1,2))
        faces = np.reshape(tmp[faces.flat], (-1,3,2))
    faces = faces.flatten()
    return compose(TriangleSignedArea2DPotential(), part(Ellipsis, faces)) 
Example #6
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def face_areas(faces, axis=1):
    '''
    face_areas(faces) yields a potential function f(x) that calculates the unsigned area of each
      face represented by the simplices matrix faces.

    If faces is None, then the parameters must arrive in the form of a flattened (n x 3 x 2) matrix
    where n is the number of triangles. Otherwise, the faces matrix must be either (n x 3) or (n x 3
    x s); if the former, each row must list the vertex indices for the faces where the vertex matrix
    is presumed to be shaped (V x 2). Alternately, faces may be a full (n x 3 x 2) simplex array of
    the indices into the parameters.

    The optional argument axis (default: 1) may be set to 0 if the faces argument is a matrix but
    the coordinate matrix will be (2 x V) instead of (V x 2).
    '''
    faces = np.asarray(faces)
    if len(faces.shape) == 2:
        if faces.shape[1] != 3: faces = faces.T
        n = 2 * (np.max(faces) + 1)
        if axis == 0: tmp = np.reshape(np.arange(n), (2,-1)).T
        else:         tmp = np.reshape(np.arange(n), (-1,2))
        faces = np.reshape(tmp[faces.flat], (-1,3,2))
    faces = faces.flatten()
    return compose(TriangleArea2DPotential(), part(Ellipsis, faces)) 
Example #7
Source File: cgp.py    From cgp-cnn with MIT License
def _evaluation(self, pop, eval_flag):
        # create network list
        net_lists = []
        active_index = np.where(eval_flag)[0]
        for i in active_index:
            net_lists.append(pop[i].active_net_list())

        # evaluation
        fp = self.eval_func(net_lists)
        for i, j in enumerate(active_index):
            pop[j].eval = fp[i]
        evaluations = np.zeros(len(pop))
        for i in range(len(pop)):
            evaluations[i] = pop[i].eval

        self.num_eval += len(net_lists)
        return evaluations 
Example #8
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def arccosine(x, null=(-np.inf, np.inf)):
    '''
    arccosine(x) is equivalent to acos(x) except that it also works on sparse arrays.

    The optional argument null (default, (-numpy.inf, numpy.inf)) may be specified to indicate what
    value(s) should be assigned when x < -1 or x > 1. If only one number is given, then it is used
    for both values; otherwise the first value corresponds to <-1 and the second to >1.  If null is
    None, then an error is raised when invalid values are encountered.
    '''
    if sps.issparse(x): x = x.toarray()
    else:               x = np.asarray(x)
    try:    (nln,nlp) = null
    except Exception: (nln,nlp) = (null,null)
    ii = None if nln is None else np.where(x < -1)
    jj = None if nlp is None else np.where(x > 1)
    if ii: x[ii] = 0
    if jj: x[jj] = 0
    x = np.arccos(x)
    if ii: x[ii] = nln
    if jj: x[jj] = nlp
    return x 
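The pattern here is to record where the input leaves the valid domain of arccos, substitute a harmless placeholder before calling the ufunc, and then write the requested null values back afterwards. A reduced sketch of the same idea:

import numpy as np

x = np.array([-1.5, -0.5, 0.0, 2.0])
too_low  = np.where(x < -1)
too_high = np.where(x > 1)

safe = x.copy()
safe[too_low]  = 0           # placeholder keeps arccos inside its domain
safe[too_high] = 0
out = np.arccos(safe)
out[too_low]  = -np.inf      # restore the requested null values
out[too_high] = np.inf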
Example #9
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def to_cortex(c):
    '''
    to_cortex(c) yields a Cortex object if the argument c can be coerced to one and otherwise raises
      an error.

    An object can be coerced to a Cortex object if:
      * it is a cortex object
      * it is a tuple (subject, h) where subject is a subject object and h is a subject hemisphere.
    '''
    if is_cortex(c): return c
    elif pimms.is_vector(c) and len(c) == 2:
        (s,h) = c
        if is_subject(s) and pimms.is_str(h):
            if h in s.hemis: return s.hemis[h]
            else: raise ValueError('to_cortex: hemi %s not found in given subject' % h)
    raise ValueError('Could not coerce argument to Cortex object')

####################################################################################################
# These functions deal with cortex_to_image and image_to_cortex interpolation: 
Example #10
Source File: nistats.py    From NiBetaSeries with MIT License
def _lsa_events_converter(events_file):
    """Make a model where each trial has its own regressor using least squares
    all (LSA)

    Parameters
    ----------
    events_file : str
        File that contains all events from the bold run

    Returns
    -------
    events : DataFrame
        A DataFrame in which each trial has its own trial_type
    """

    import pandas as pd
    events = pd.read_csv(events_file, sep='\t')
    events['original_trial_type'] = events['trial_type']
    for cond, cond_df in events.groupby('trial_type'):
        cond_idx = cond_df.index
        for i_trial, trial_idx in enumerate(cond_idx):
            trial_name = '{0}_{1:04d}'.format(cond, i_trial+1)
            events.loc[trial_idx, 'trial_type'] = trial_name
    return events 
Example #11
Source File: train_val.py    From Collaborative-Learning-for-Weakly-Supervised-Object-Detection with MIT License
def filter_roidb(roidb):
  """Remove roidb entries that have no usable RoIs."""

  def is_valid(entry):
    # Valid images have:
    #   (1) At least one foreground RoI OR
    #   (2) At least one background RoI
    overlaps = entry['max_overlaps']
    # find boxes with sufficient overlap
    fg_inds = np.where(overlaps >= cfg.TRAIN.FG_THRESH)[0]
    # Select background RoIs as those within [BG_THRESH_LO, BG_THRESH_HI)
    bg_inds = np.where((overlaps < cfg.TRAIN.BG_THRESH_HI) &
                       (overlaps >= cfg.TRAIN.BG_THRESH_LO))[0]
    # image is only valid if such boxes exist
    valid = len(fg_inds) > 0 or len(bg_inds) > 0
    return valid

  num = len(roidb)
  filtered_roidb = [entry for entry in roidb if is_valid(entry)]
  num_after = len(filtered_roidb)
  print('Filtered {} roidb entries: {} -> {}'.format(num - num_after,
                                                     num, num_after))
  return filtered_roidb 
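The validity test reduces to thresholding the overlaps vector with np.where; the threshold constants below are placeholders standing in for the cfg.TRAIN values used above.

import numpy as np

overlaps = np.array([0.05, 0.3, 0.9, 0.0])
FG_THRESH, BG_THRESH_LO, BG_THRESH_HI = 0.5, 0.1, 0.5   # placeholder thresholds

fg_inds = np.where(overlaps >= FG_THRESH)[0]                                   # foreground RoIs
bg_inds = np.where((overlaps < BG_THRESH_HI) & (overlaps >= BG_THRESH_LO))[0]  # background RoIs
is_valid = len(fg_inds) > 0 or len(bg_inds) > 0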
Example #12
Source File: Collection.py    From fullrmc with GNU Affero General Public License v3.0
def collect(self, index, dataDict, check=True):
        """
        Collect atom given its index.

        :Parameters:
            #. index (int): The atom index to collect.
            #. dataDict (dict): The atom data dict to collect.
            #. check (boolean):  Whether to check dataDict keys before
               collecting. If set to False, user promises that collected
               data is a dictionary and contains the needed keys.
        """
        assert not self.is_collected(index), LOGGER.error("attempting to collect an already collected atom of index '%i'"%index)
        # add data
        if check:
            assert isinstance(dataDict, dict), LOGGER.error("dataDict must be a dictionary of data where keys are dataKeys")
            assert tuple(sorted(dataDict)) == self.__dataKeys, LOGGER.error("dataDict keys don't match promised dataKeys")
        self.__collectedData[index] = dataDict
        # set indexes sorted array
        idx = np.searchsorted(a=self.__indexesSortedArray, v=index, side='left')
        self.__indexesSortedArray = np.insert(self.__indexesSortedArray, idx, index)
        # set state
        self.__state = str(uuid.uuid1()) 
Example #13
Source File: test.py    From MomentumContrast.pytorch with MIT License
def show(mnist, targets, ret):
    target_ids = range(len(set(targets)))
    
    colors = ['r', 'g', 'b', 'c', 'm', 'y', 'k', 'violet', 'orange', 'purple']
    
    plt.figure(figsize=(12, 10))
    
    ax = plt.subplot(aspect='equal')
    for label in set(targets):
        idx = np.where(np.array(targets) == label)[0]
        plt.scatter(ret[idx, 0], ret[idx, 1], c=colors[label], label=label)
    
    for i in range(0, len(targets), 250):
        img = (mnist[i][0] * 0.3081 + 0.1307).numpy()[0]
        img = OffsetImage(img, cmap=plt.cm.gray_r, zoom=0.5) 
        ax.add_artist(AnnotationBbox(img, ret[i]))
    
    plt.legend()
    plt.show() 
Example #14
Source File: Collection.py    From fullrmc with GNU Affero General Public License v3.0
def get_data_by_key(self, key):
        """
        Get all collected atoms data that is associated with a key.

        :Parameters:
            #. key (int): the data key.

        :Returns:
            #. data (dict): dictionary keyed by atom index whose values are the collected data.
        """
        data = {}
        for k in self.__collectedData:
            data[k] = self.__collectedData[k][key]
        return data 
Example #15
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def nanlog(x, null=np.nan):
    '''
    nanlog(x) is equivalent to numpy.log(x) except that it avoids calling log on 0 and non-finite
      values; in place of these values, it returns the value null (which is nan by default).
    '''
    x = np.asarray(x)
    ii0 = np.where(np.isfinite(x))
    ii  = np.where(x[ii0] > 0)[0]
    if len(ii) == numel(x): return np.log(x)
    res = np.full(x.shape, null)
    ii = tuple([u[ii] for u in ii0])
    res[ii] = np.log(x[ii])
    return res 
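A simpler way to express the same guard, at the cost of a temporary copy, is the three-argument form of np.where with an explicit validity mask (a sketch, not the neuropythy implementation):

import numpy as np

x = np.array([np.e, 0.0, -1.0, np.inf])
ok = np.isfinite(x) & (x > 0)                              # where log is well defined
res = np.where(ok, np.log(np.where(ok, x, 1.0)), np.nan)   # array([ 1., nan, nan, nan])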
Example #16
Source File: LogisticRegression.py    From fuku-ml with MIT License
def theta(self, s):

        '''
        Theta sigmoid function
        '''

        s = np.where(s < -709, -709, s)

        return 1 / (1 + np.exp((-1) * s)) 
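The clamp at -709 keeps np.exp from overflowing: for s much below -709, exp(-s) exceeds the largest representable float64 (about 1.8e308), so the input is floored first. The same guard in isolation:

import numpy as np

def sigmoid(s):
    # floor s so that np.exp(-s) stays finite in float64 (exp(709) ~ 8.2e307)
    s = np.where(s < -709, -709, s)
    return 1.0 / (1.0 + np.exp(-s))

sigmoid(np.array([-1000.0, 0.0, 1000.0]))   # no overflow warning; roughly [0, 0.5, 1]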
Example #17
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def even_out(self, precision=0.001):
        '''
        Yields an equivalent curve in which the parametric value t
        corresponds to distance along the curve in the x/y plane (up to the given precision).
        '''
        dists = [self.curve_length(s, e, precision=precision)
                 for (s,e) in zip(self.t[:-1], self.t[1:])]
        return CurveSpline(self.coordinates,
                           order=self.order,
                           weights=self.weights,
                           smoothing=self.smoothing,
                           periodic=self.periodic,
                           distances=dists,
                           meta_data=self.meta_data) 
Example #18
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def curve_intersection(c1, c2, grid=16):
    '''
    curve_intersection(c1, c2) yields the parametric distances (t1, t2) such that c1(t1) == c2(t2).
      
    The optional parameter grid may specify the number of grid-points
    to use in the initial search for a start-point (default: 16).
    '''
    from scipy.optimize import minimize
    from neuropythy.geometry import segment_intersection_2D
    if c1.coordinates.shape[1] > c2.coordinates.shape[1]:
        (t1,t2) = curve_intersection(c2, c1, grid=grid)
        return (t2,t1)
    # before doing a search, see if there are literal exact intersections of the segments
    x1s  = c1.coordinates.T
    x2s  = c2.coordinates
    for (ts,te,xs,xe) in zip(c1.t[:-1], c1.t[1:], x1s[:-1], x1s[1:]):
        pts = segment_intersection_2D((xs,xe), (x2s[:,:-1], x2s[:,1:]))
        ii = np.where(np.isfinite(pts[0]))[0]
        if len(ii) > 0:
            ii = ii[0]
            def f(t): return np.sum((c1(t[0]) - c2(t[1]))**2)
            t01 = 0.5*(ts + te)
            t02 = 0.5*(c2.t[ii] + c2.t[ii+1])
            (t1,t2) = minimize(f, (t01, t02)).x
            return (t1,t2)
    if pimms.is_vector(grid): (ts1,ts2) = [c.t[0] + (c.t[-1] - c.t[0])*grid for c in (c1,c2)]
    else:                     (ts1,ts2) = [np.linspace(c.t[0], c.t[-1], grid) for c in (c1,c2)]
    (pts1,pts2) = [c(ts) for (c,ts) in zip([c1,c2],[ts1,ts2])]
    ds = np.sqrt([np.sum((pts2.T - pp)**2, axis=1) for pp in pts1.T])
    (ii,jj) = np.unravel_index(np.argmin(ds), ds.shape)
    (t01,t02) = (ts1[ii], ts2[jj])
    ttt = []
    def f(t): return np.sum((c1(t[0]) - c2(t[1]))**2)
    (t1,t2) = minimize(f, (t01, t02)).x
    return (t1,t2) 
Example #19
Source File: Collection.py    From fullrmc with GNU Affero General Public License v3.0
def rebin(data, bin=0.05, check=False):
    """
    Re-bin 2D data of shape (N,2). In general, fullrmc requires evenly
    spaced experimental data bins. This function can be used to recompute
    any type of experimental data according to a given bin size.

    :Parameters:
        #. data (numpy.ndarray): The (N,2) shape data where first column is
           considered experimental data space values (e.g. r, q) and
           second column experimental data values.
        #. bin (number): New desired bin size.
        #. check (boolean): whether to check arguments before rebinning.

    :Returns:
        #. X (numpy.ndarray): First column re-binned.
        #. Y (numpy.ndarray): Second column re-binned.
    """
    if check:
        assert isinstance(data, np.ndarray), LOGGER.error("data must be numpy.ndarray instance")
        assert len(data.shape)==2, LOGGER.error("data must be of 2 dimensions")
        assert data.shape[1]==2, LOGGER.error("data must have 2 columns")
        assert is_number(bin), LOGGER.error("bin must be a number")
        bin = float(bin)
        assert bin>0, LOGGER.error("bin must be positive")
    # rebin
    x = data[:,0].astype(float)
    y = data[:,1].astype(float)
    rx = []
    ry = []
    x0 = int(x[0]/bin)*bin-bin/2.
    xn = int(x[-1]/bin)*bin+bin/2.
    bins = np.arange(x0,xn, bin)
    if bins[-1] != xn:
        bins = np.append(bins, xn)
    # get weights histogram
    W,E = np.histogram(x, bins=bins)
    W[np.where(W==0)[0]] = 1
    # get data histogram
    S,E = np.histogram(x, bins=bins, weights=y)
    # return
    return (E[1:]+E[:-1])/2., S/W 
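The np.where call above guards against empty bins: W counts the samples falling in each bin, zero counts are replaced with 1, and the final division S/W therefore never divides by zero (an empty bin simply yields 0). A minimal sketch of that core step:

import numpy as np

x = np.array([0.10, 0.12, 0.30, 0.31, 0.33])   # sample positions
y = np.array([1.0, 3.0, 2.0, 4.0, 6.0])        # sample values
bins = np.arange(0.0, 0.5, 0.1)

W, E = np.histogram(x, bins=bins)              # sample count per bin
W[np.where(W == 0)[0]] = 1                     # avoid division by zero for empty bins
S, _ = np.histogram(x, bins=bins, weights=y)   # summed values per bin
centers, means = (E[1:] + E[:-1]) / 2.0, S / W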
Example #20
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def lh_gray_indices(lh_gray_mask):
        '''
        sub.lh_gray_indices is equivalent to numpy.where(sub.lh_gray_mask).
        '''
        if lh_gray_mask is None: return None
        if is_image(lh_gray_mask): lh_gray_mask = lh_gray_mask.dataobj
        return tuple([pimms.imm_array(x) for x in np.where(lh_gray_mask)]) 
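On a 3D mask, np.where returns a tuple of three index arrays, one per axis; that tuple can index directly back into any volume of the same shape. A small synthetic example (not a real gray-matter mask):

import numpy as np

mask = np.zeros((4, 4, 4), dtype=bool)
mask[0, 0, 0] = mask[1, 2, 3] = True

idx = np.where(mask)         # (array([0, 1]), array([0, 2]), array([0, 3]))
values = mask[idx]           # fancy-indexing a volume with the same tuple
coords = np.transpose(idx)   # (n_voxels, 3) array of voxel coordinates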
Example #21
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def jacobian(self, params, into=None):
        params = flattest(params)
        n = len(params)
        ii = np.arange(n)
        (rs,cs,zs) = ([],[],[])
        for ((mn,mx), f) in self.pieces_with_default:
            if len(ii) == 0: break
            k = np.where((params >= mn) & (params <= mx))[0]
            if len(k) == 0: continue
            kk = ii[k]
            j = f.jacobian(params[k])
            if j.shape[0] == 1 and j.shape[1] > 1: j = repmat(j, j.shape[1], 1)
            (rj,cj,vj) = sps.find(j)
            rs.append(kk[rj])
            cs.append(kk[cj])
            zs.append(vj)
            ii = np.delete(ii, k)
            params = np.delete(params, k)
        (rs,cs,zs) = [np.concatenate(us) if len(us) > 0 else [] for us in (rs,cs,zs)]
        dz = sps.csr_matrix((zs, (rs,cs)), shape=(n,n))
        return safe_into(into, dz) 
Example #22
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def output_indices(ii):
        ii = flattest(ii)
        if (np.issubdtype(ii.dtype, np.dtype('bool').type)): ii = np.where(ii)[0]
        return pimms.imm_array(ii) 
Example #23
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def __call__(self, params):
        '''
        pf(params) yields the tuple (z, dz) where z is the potential value at the given parameters
          vector, params, and dz is the vector of the potential gradient.
        '''
        z  = self.value(params)
        dz = self.jacobian(params)
        if sps.issparse(dz): dz = dz.toarray()
        z  = np.squeeze(z)
        dz = np.squeeze(dz)
        return (z,dz) 
Example #24
Source File: util.py    From neuropythy with GNU Affero General Public License v3.0
def triangle_address(fx, pt):
    '''
    triangle_address(FX, P) yields an address coordinate (t,r) for the point P in the triangle
    defined by the (3 x d)-sized coordinate matrix FX, in which each row of the matrix is the
    d-dimensional vector representing the respective triangle vertex for triangle [A,B,C]. The
    resulting coordinates (t,r) (0 <= t <= 1, 0 <= r <= 1) address the point P such that t gives
    the fraction of the angle from vector AB to vector AC that is made by the angle between vectors
    AB and AP, and r gives the fraction ||AP||/||AR|| where R is the point of intersection between
    lines AP and BC. If P is a (d x n)-sized matrix of points, then a (2 x n) matrix of addresses
    is returned.
    '''
    fx = np.asarray(fx)
    pt = np.asarray(pt)
    # The triangle vectors...
    ab = fx[1] - fx[0]
    ac = fx[2] - fx[0]
    bc = fx[2] - fx[1]
    ap = np.asarray([pt_i - a_i for (pt_i, a_i) in zip(pt, fx[0])])
    # get the unnormalized distance...
    r = np.sqrt((ap ** 2).sum(0))
    # now we can find the angle...
    unit = 1 - r.astype(bool)
    t0 = vector_angle(ab, ac)
    t = vector_angle(ap + [ab_i * unit for ab_i in ab], ab)
    sint = np.sin(t)
    sindt = np.sin(t0 - t)
    # finding r0 is tricker--we use this fancy formula based on the law of sines
    q0 = np.sqrt((bc ** 2).sum(0))          # B->C distance
    beta = vector_angle(-ab, bc)            # Angle at B
    sinGamma = np.sin(math.pi - beta - t0)
    sinBeta  = np.sin(beta)
    r0 = q0 * sinBeta * sinGamma / (sinBeta * sindt + sinGamma * sint)
    return np.asarray([t/t0, r/r0]) 
Example #25
Source File: util.py    From neuropythy with GNU Affero General Public License v3.0
def segment_intersection_2D(p12arg, p34arg, atol=1e-8):
    '''
    segment_intersection_2D((a, b), (c, d)) yields the intersection point between the line segments
    that pass from point a to point b and from point c to point d. If there is no intersection
    point, then (numpy.nan, numpy.nan) is returned.
    '''
    (p1,p2) = p12arg
    (p3,p4) = p34arg
    pi = np.asarray(line_intersection_2D(p12arg, p34arg, atol=atol))
    p1 = np.asarray(p1)
    p2 = np.asarray(p2)
    p3 = np.asarray(p3)
    p4 = np.asarray(p4)
    u12 = p2 - p1
    u34 = p4 - p3
    cfn = lambda px,iis: (px if iis is None or len(px.shape) == 1 or px.shape[1] == len(iis) else
                          px[:,iis])
    dfn = lambda a,b:     a[0]*b[0] + a[1]*b[1]
    sfn = lambda a,b:     ((a-b)                 if len(a.shape) == len(b.shape) else
                           (np.transpose([a])-b) if len(a.shape) <  len(b.shape) else
                           (a - np.transpose([b])))
    fn  = lambda px,iis:  (1 - ((dfn(cfn(u12,iis), sfn(         px, cfn(p1,iis))) > 0) *
                                (dfn(cfn(u34,iis), sfn(         px, cfn(p3,iis))) > 0) *
                                (dfn(cfn(u12,iis), sfn(cfn(p2,iis),          px)) > 0) *
                                (dfn(cfn(u34,iis), sfn(cfn(p4,iis),          px)) > 0)))
    if len(pi.shape) == 1:
        if not np.isfinite(pi[0]): return (np.nan, np.nan)
        bad = fn(pi, None)
        return (np.nan, np.nan) if bad else pi
    else:
        nonpar = np.where(np.isfinite(pi[0]))[0]
        bad = fn(cfn(pi, nonpar), nonpar)
        (xi,yi) = pi
        bad = nonpar[np.where(bad)[0]]
        xi[bad] = np.nan
        yi[bad] = np.nan
        return (xi,yi) 
Example #26
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def unaddress(self, data, surface=0.5):
        '''
        cortex.unaddress(address) yields the (3 x n) coordinate matrix of the given addresses (or,
          if address is singular, the 3D vector) in the given cortex. If the address is a 2D instead
          of a 3D address, then the mid-gray position is returned by default.

        The following options may be given:
          * surface (default: 0.5) specifies the surface to use for 2D addresses; this should be
            either 'white', 'pial', 'midgray', or a real number in the range [0,1] where 0 is the
            white surface and 1 is the pial surface.
        '''
        (faces, coords) = address_data(data, 3, surface=surface)
        (bc, ds) = (coords[:2], coords[2])
        faces = self.tess.index(faces)
        (wx, px) = (self.white_surface.coordinates, self.pial_surface.coordinates)
        if all(len(np.shape(x)) > 1 for x in (faces, coords)):
            (wtx, ptx) = [
                np.transpose([sx[:,ff] if ff[0] >= 0 else null for ff in faces.T], (2,1,0))
                for null in [np.full((3, wx.shape[0]), np.nan)]
                for sx   in (wx, px)]
        elif faces == -1:
            # unresolvable address (no containing face): return a 3D vector of NaNs
            return np.full(3, np.nan)
        else:
            (wtx, ptx) = [sx[:,faces].T for sx in (wx, px)]
        (wu, pu) = [geo.barycentric_to_cartesian(tx, bc) for tx in (wtx, ptx)]
        return wu*ds + pu*(1 - ds) 
Example #27
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def rh_white_indices(rh_white_mask):
        '''
        sub.rh_white_indices is a frozenset of the indices of the white voxels in the given
        subject's rh, represented as 3-tuples.
        '''
        if rh_white_mask is None: return None
        if is_image(rh_white_mask): rh_white_mask = rh_white_mask.dataobj
        idcs = np.transpose(np.where(rh_white_mask))
        return frozenset([tuple(row) for row in idcs]) 
Example #28
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def lh_white_indices(lh_white_mask):
        '''
        sub.lh_white_indices is a frozenset of the indices of the white voxels in the given
        subject's lh, represented as 3-tuples.
        '''
        if lh_white_mask is None: return None
        if is_image(lh_white_mask): lh_white_mask = lh_white_mask.dataobj
        idcs = np.transpose(np.where(lh_white_mask))
        return frozenset([tuple(row) for row in idcs]) 
Example #29
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def rh_gray_indices(rh_gray_mask):
        '''
        sub.rh_gray_indices is equivalent to numpy.where(sub.rh_gray_mask).
        '''
        if rh_gray_mask is None: return None
        if is_image(rh_gray_mask): rh_gray_mask = rh_gray_mask.dataobj
        return tuple([pimms.imm_array(x) for x in np.where(rh_gray_mask)]) 
Example #30
Source File: cli.py    From RF-Monitor with GNU General Public License v2.0
def __on_scan_data(self, event):
        levels = numpy.log10(event['l'])
        levels *= 10

        noise = numpy.percentile(levels,
                                 self._dynP)

        for monitor in self._monitors:
            freq = monitor.get_frequency()
            if monitor.get_enabled():
                monitor.set_noise(noise)
                index = numpy.where(freq == event['f'])[0]
                signal = monitor.set_level(levels[index][0],
                                           event['timestamp'],
                                           self._location)

                if signal is not None:
                    signals = 'Signals: {}\r'.format(self.__count_signals() -
                                                     self._signalCount)
                    self.__std_out(signals, False)
                    if signal.end is not None:
                        recording = format_recording(freq, signal)
                        if self._pushUri is not None:
                            self._push.send(self._pushUri,
                                            recording)
                        if self._server is not None:
                            self._server.send(recording)
                        if self._json:
                            sys.stdout.write(recording + '\n')
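Here numpy.where serves as an exact-match lookup: it returns the position of the monitored frequency within the frequency axis of the scan, and that position then indexes the corresponding level. A standalone sketch with made-up frequencies and levels:

import numpy as np

freqs  = np.array([433.05, 433.92, 434.79])   # hypothetical scan frequencies (MHz)
levels = np.array([-92.0, -61.5, -90.2])      # hypothetical levels (dB)

target = 433.92
index = np.where(freqs == target)[0]          # array([1])
level = levels[index][0]                      # -61.5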