Java Code Examples for org.apache.cassandra.dht.IPartitioner#getTokenValidator()

The following examples show how to use org.apache.cassandra.dht.IPartitioner#getTokenValidator() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage in the sidebar.
Example 1
Source File: RangeUtils.java    From deep-spark with Apache License 2.0 6 votes vote down vote up
/**
 * Gets the list of token for each cluster machine.<br/>
 * The concrete class of the token depends on the partitioner used.<br/>
 *
 * @param query           the query to execute against the given session to obtain the list of tokens.
 * @param sessionWithHost the pair object containing both the session and the name of the machine to which we're connected to.
 * @param partitioner     the partitioner used in the cluster.
 * @return a map containing, for each cluster machine, the list of tokens. Tokens are not returned in any particular
 * order.
 */
/**
 * Retrieves, for every machine in the cluster, the list of tokens it owns.<br/>
 * The concrete token class depends on the partitioner configured for the cluster.<br/>
 *
 * @param query           the query executed against the given session to obtain the token rows.
 * @param sessionWithHost pair holding the open session and the name of the machine it is connected to.
 * @param partitioner     the partitioner used by the cluster.
 * @return a map from machine name to the tokens owned by that machine; tokens come back in no
 * particular order.
 */
static Map<String, Iterable<Comparable>> fetchTokens(String query, final Pair<Session, String> sessionWithHost,
                                                     IPartitioner partitioner) {

    ResultSet resultSet = sessionWithHost.left.execute(query);

    // The validator knows how to deserialize token values for this partitioner.
    final AbstractType tokenValidator = partitioner.getTokenValidator();

    final Map<String, Iterable<Comparable>> result = Maps.newHashMap();
    for (Pair<String, Iterable<Comparable>> hostTokens :
            transform(resultSet.all(), new FetchTokensRowPairFunction(sessionWithHost, tokenValidator))) {
        result.put(hostTokens.left, hostTokens.right);
    }

    return result;
}
 
Example 2
Source File: ThriftRangeUtils.java    From deep-spark with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a new {@link ThriftRangeUtils}.
 *
 * @param partitioner  the partitioner.
 * @param host         the host address.
 * @param rpcPort      the host RPC port.
 * @param keyspace     the keyspace name.
 * @param columnFamily the column family name.
 * @param splitSize    the number of rows per split.
 */
/**
 * Builds a new {@link ThriftRangeUtils}.
 *
 * @param partitioner  the cluster partitioner.
 * @param host         the host address.
 * @param rpcPort      the host RPC port.
 * @param keyspace     the keyspace name.
 * @param columnFamily the column family name.
 * @param splitSize    the number of rows per split.
 */
public ThriftRangeUtils(IPartitioner partitioner,
                        String host,
                        int rpcPort,
                        String keyspace,
                        String columnFamily,
                        int splitSize) {
    // Connection coordinates.
    this.host = host;
    this.rpcPort = rpcPort;
    // Target table and split sizing.
    this.keyspace = keyspace;
    this.columnFamily = columnFamily;
    this.splitSize = splitSize;
    // Everything token-related is derived from the partitioner.
    this.tokenType = partitioner.getTokenValidator();
    this.tokenFactory = partitioner.getTokenFactory();
    this.minToken = (Comparable) partitioner.getMinimumToken().token;
}
 
Example 3
Source File: ThriftRangeUtilsTest.java    From deep-spark with Apache License 2.0 6 votes vote down vote up
/**
 * Asserts that converting the given Thrift split produces exactly {@code expectedRanges}.
 * Host, port, keyspace, column family and split size play no role in the conversion,
 * so dummy values are passed to the utils constructor.
 */
private static <K extends Comparable, T extends Token<K>> void testDeepTokenRanges(IPartitioner<T> partitioner,
                                                                                   K startToken,
                                                                                   K endToken,
                                                                                   List<String> endpoints,
                                                                                   List<DeepTokenRange> expectedRanges) {

    ThriftRangeUtils utils = new ThriftRangeUtils(partitioner, "", 0, "", "", 0);

    Token.TokenFactory factory = partitioner.getTokenFactory();
    AbstractType validator = partitioner.getTokenValidator();

    // Round-trip each typed token through its serialized form into Thrift's string representation.
    String startText = factory.toString(factory.fromByteArray(validator.decompose(startToken)));
    String endText = factory.toString(factory.fromByteArray(validator.decompose(endToken)));

    List<DeepTokenRange> actualRanges =
            utils.deepTokenRanges(Arrays.asList(new CfSplit(startText, endText, 0)), endpoints);
    assertEquals(actualRanges, expectedRanges);
}
 
Example 4
Source File: RangeUtils.java    From deep-spark with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the token ranges that will be mapped to Spark partitions.
 *
 * @param config the Deep configuration object.
 * @return the list of computed token ranges.
 */
/**
 * Returns the token ranges that will be mapped to Spark partitions.
 *
 * @param config the Deep configuration object; supplies the host, keyspace, table and split size.
 * @return the list of computed token ranges, one per split reported by the cluster.
 */
public static List<DeepTokenRange> getSplitsBySize(
        CassandraDeepJobConfig config) {

    IPartitioner p = getPartitioner(config);
    // The validator deserializes the raw token bytes returned by the server.
    AbstractType tokenValidator = p.getTokenValidator();

    Pair<Session, String> sessionWithHost = CassandraClientProvider
            .getSession(config.getHost(), config, false);

    // Cassandra-specific statement that asks the cluster for split boundaries
    // sized to roughly config.getSplitSize() rows each.
    // Plain concatenation is clearer than an explicit StringBuilder here; the
    // compiler generates the equivalent code.
    String query = "CALCULATE SPLITS FROM " + config.getKeyspace() + "."
            + config.getTable() + " ESTIMATING " + config.getSplitSize();
    ResultSet rSet = sessionWithHost.left.execute(query);

    List<DeepTokenRange> tokens = new ArrayList<>();

    for (Row row : rSet.all()) {
        Comparable startToken = (Comparable) tokenValidator.compose(row
                .getBytesUnsafe("start_token"));
        Comparable endToken = (Comparable) tokenValidator.compose(row
                .getBytesUnsafe("end_token"));
        // Replica hosts preferred for reading this range.
        List<String> replicas = new ArrayList<>();
        for (InetAddress address : row.getList("preferred_locations",
                InetAddress.class)) {
            replicas.add(address.getHostName());
        }
        tokens.add(new DeepTokenRange(startToken, endToken, replicas));
    }
    return tokens;
}