me.prettyprint.hector.api.exceptions.HInvalidRequestException Java Examples

The following examples show how to use me.prettyprint.hector.api.exceptions.HInvalidRequestException. They are taken from open-source projects; the original project and source file are noted above each example.
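
Before the project examples, here is a minimal, self-contained sketch of the basic pattern they all share: executing a Hector query and reacting to HInvalidRequestException, the unchecked exception Hector raises when Cassandra rejects a request as invalid. The class name, column family and column names in this sketch are illustrative assumptions, not taken from the projects below.

import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.beans.HColumn;
import me.prettyprint.hector.api.exceptions.HInvalidRequestException;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.query.ColumnQuery;
import me.prettyprint.hector.api.query.QueryResult;

public class HInvalidRequestExceptionSketch {

    /**
     * Reads a single column value, returning null when Cassandra rejects the
     * request (for example, because the column family does not exist).
     */
    public static String readColumnOrNull( Keyspace keyspace, String rowKey ) {
        ColumnQuery<String, String, String> query = HFactory.createColumnQuery(
            keyspace, StringSerializer.get(), StringSerializer.get(), StringSerializer.get() );
        query.setColumnFamily( "example_cf" );   // assumed column family name
        query.setKey( rowKey );
        query.setName( "example_column" );       // assumed column name

        try {
            QueryResult<HColumn<String, String>> result = query.execute();
            HColumn<String, String> column = result.get();
            return column == null ? null : column.getValue();
        }
        catch ( HInvalidRequestException e ) {
            // Cassandra reported the request as invalid; the underlying cause is in e.getCause().
            return null;
        }
    }
}
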
Example #1
Source File: AbstractSearch.java    From usergrid with Apache License 2.0
/**
 * Executes the slice query and intentionally swallows ordering errors. Occasionally our time-UUID ordering
 * does not agree with the Cassandra comparator when the microsecond components are very close.
 *
 * @param query the slice query to execute
 * @param <K> the row key type
 * @param <UUID> the column name (time UUID) type
 * @param <V> the column value type
 * @return the columns from the slice, or an empty list when Cassandra reports a range-order error
 */
protected static <K, UUID, V> List<HColumn<UUID, V>> swallowOrderedExecution( final SliceQuery<K, UUID, V> query ) {
    try {

        return query.execute().get().getColumns();
    }
    catch ( HInvalidRequestException e ) {
        //invalid request. Occasionally we get ordering issues when there shouldn't be any; disregard them.

        final Throwable invalidRequestException = e.getCause();

        if ( invalidRequestException instanceof InvalidRequestException
                //we had a range error
                && ( ( InvalidRequestException ) invalidRequestException ).getWhy().contains(
                "range finish must come after start in the order of traversal" )) {
            return Collections.emptyList();
        }

        throw e;
    }
}
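
As a usage sketch (hedged: the helper name readTimeoutSlice, the column family name and the page size are assumptions for illustration, not part of the usergrid code, and the serializers from me.prettyprint.cassandra.serializers are assumed imported), a caller would build the SliceQuery and let swallowOrderedExecution absorb the occasional range-order rejection:

protected static List<HColumn<java.util.UUID, ByteBuffer>> readTimeoutSlice(
        final Keyspace keyspace, final String rowKey,
        final java.util.UUID start, final java.util.UUID finish ) {

    SliceQuery<String, java.util.UUID, ByteBuffer> query = HFactory.createSliceQuery(
        keyspace, StringSerializer.get(), UUIDSerializer.get(), ByteBufferSerializer.get() );
    query.setColumnFamily( "Example_Timeouts" );   // assumed column family name
    query.setKey( rowKey );
    query.setRange( start, finish, false, 100 );   // Cassandra rejects this if finish precedes start

    // Returns an empty list instead of failing when the range-order error is reported.
    return swallowOrderedExecution( query );
}
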
 
Example #2
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
/**
 * If the repository does not exist yet, it will be created.
 *
 * @param repositoryId the identifier of the repository to look up
 * @return the existing repository, or the newly created one
 */
public Repository getOrCreateRepository( String repositoryId )
    throws MetadataRepositoryException
{
    String cf = cassandraArchivaManager.getRepositoryFamilyName();

    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, StringSerializer.get(), StringSerializer.get(),
                                 StringSerializer.get() ) //
        .setColumnFamily( cf ) //
        .setColumnNames( REPOSITORY_NAME.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
        .execute();

    if ( result.get().getCount() < 1 )
    {
        // we need to create the repository
        Repository repository = new Repository( repositoryId );

        try
        {
            MutationResult mutationResult = HFactory.createMutator( keyspace, StringSerializer.get() ) //
                .addInsertion( repositoryId, cf,
                               CassandraUtils.column( REPOSITORY_NAME.toString(), repository.getName() ) ) //
                .execute();
            logger.debug( "time to insert repository: {}", mutationResult.getExecutionTimeMicro() );
            return repository;
        }
        catch ( HInvalidRequestException e )
        {
            logger.error( e.getMessage(), e );
            throw new MetadataRepositoryException( e.getMessage(), e );
        }

    }

    return new Repository(
        result.get().getList().get( 0 ).getColumnSlice().getColumnByName( REPOSITORY_NAME.toString() ).getValue() );
}
 
Example #3
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
private Namespace updateOrAddNamespace( String repositoryId, String namespaceId )
    throws MetadataRepositoryException
{
    try
    {
        Repository repository = getOrCreateRepository( repositoryId );

        String key =
            new Namespace.KeyBuilder().withNamespace( namespaceId ).withRepositoryId( repositoryId ).build();

        Namespace namespace = getNamespace( repositoryId, namespaceId );
        if ( namespace == null )
        {
            String cf = cassandraArchivaManager.getNamespaceFamilyName();
            namespace = new Namespace( namespaceId, repository );
            HFactory.createMutator( keyspace, StringSerializer.get() )
                //  values
                .addInsertion( key, cf, CassandraUtils.column( NAME.toString(), namespace.getName() ) ) //
                .addInsertion( key, cf, CassandraUtils.column( REPOSITORY_NAME.toString(), repository.getName() ) ) //
                .execute();
        }

        return namespace;
    }
    catch ( HInvalidRequestException e )
    {
        logger.error( e.getMessage(), e );
        throw new MetadataRepositoryException( e.getMessage(), e );
    }
}
 
Example #4
Source File: Cassandra12xTripleIndexDAO.java    From cumulusrdf with Apache License 2.0
/**
 * Creates a column family definition.
 * 
 * @param cfName the column family name.
 * @param indexedCols names of columns that will be indexed.
 * @param keyComp the key comparator.
 * @param valueValidationClass the value validation class.
 * @param compositeCol a flag that indicates if columns are composite.
 * @return the column family definition.
 */
protected ColumnFamilyDefinition createCF(
		final String cfName,
		final List<byte[]> indexedCols,
		final ComparatorType keyComp,
		final ComparatorType valueValidationClass,
		final boolean compositeCol) {

	final ColumnFamilyDefinition cfdef = HFactory.createColumnFamilyDefinition(
			_dataAccessLayerFactory.getKeyspaceName(), 
			cfName, 
			compositeCol
				? ComparatorType.COMPOSITETYPE
				: ComparatorType.BYTESTYPE);
	cfdef.setKeyspaceName(_dataAccessLayerFactory.getKeyspaceName());
	cfdef.setColumnType(ColumnType.STANDARD);
	cfdef.setCompactionStrategy("LeveledCompactionStrategy");

	if (compositeCol) {
		cfdef.setComparatorTypeAlias("(BytesType, BytesType, BytesType)");
	}

	for (byte[] col : indexedCols) {
		final String indexColumnFamilyName = "index_" + cfName + "_" + Arrays.hashCode(col);
		try {
			_dataAccessLayerFactory.getCluster().dropColumnFamily(
					_dataAccessLayerFactory.getKeyspaceName(),
					indexColumnFamilyName,
					true);
		} catch (HInvalidRequestException ignore) {
			// The index column family does not exist yet, so there is nothing to drop.
		}
		cfdef.addColumnDefinition(createCDef(col, valueValidationClass.getClassName(), indexColumnFamilyName));
	}

	cfdef.setKeyValidationClass(keyComp.getClassName());
	cfdef.setDefaultValidationClass(valueValidationClass.getClassName());
	cfdef.setCompressionOptions(_compressionOptions);

	return new ThriftCfDef(cfdef);
}
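
A hedged follow-up sketch of how such a definition might be registered (the column family name and the indexedColumns list are illustrative assumptions; only createCF and _dataAccessLayerFactory come from the example above): adding an already-existing column family is another situation in which Hector surfaces an HInvalidRequestException.

final List<byte[]> indexedColumns = Arrays.asList( "ctx".getBytes() );   // assumed indexed column
final ColumnFamilyDefinition cfDef = createCF(
		"example_triples",        // assumed column family name
		indexedColumns,
		ComparatorType.BYTESTYPE,
		ComparatorType.BYTESTYPE,
		true );

try {
	_dataAccessLayerFactory.getCluster().addColumnFamily( cfDef, true );
} catch (final HInvalidRequestException e) {
	// Typically means the column family already exists; anything else deserves a closer look.
}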