me.prettyprint.hector.api.query.QueryResult Java Examples

The following examples show how to use me.prettyprint.hector.api.query.QueryResult. Each example is taken from an open-source project; the source file, project name, and license are noted above each snippet.
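Before the project examples, here is a minimal sketch of the pattern they all share: build a query with HFactory, call execute() to obtain a QueryResult, then call get() to unwrap the typed payload. The cluster name, host, keyspace, column family, row key, and column name below are hypothetical placeholders, not taken from any of the projects.

import me.prettyprint.cassandra.serializers.StringSerializer;
import me.prettyprint.hector.api.Cluster;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.beans.HColumn;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.query.ColumnQuery;
import me.prettyprint.hector.api.query.QueryResult;

public class QueryResultBasics {
    public static void main(String[] args) {
        StringSerializer ss = StringSerializer.get();

        // Hypothetical cluster/keyspace/column-family names, used only for illustration.
        Cluster cluster = HFactory.getOrCreateCluster("test-cluster", "localhost:9160");
        Keyspace keyspace = HFactory.createKeyspace("MyKeyspace", cluster);

        // Build a single-column query; the three serializers cover row key, column name, and value.
        ColumnQuery<String, String, String> query = HFactory.createColumnQuery(keyspace, ss, ss, ss);
        query.setColumnFamily("MyColumnFamily").setKey("row-1").setName("someColumn");

        // execute() wraps the response in a QueryResult; get() unwraps the typed payload,
        // which is null when the column does not exist.
        QueryResult<HColumn<String, String>> result = query.execute();
        HColumn<String, String> column = result.get();
        if (column != null) {
            System.out.println(column.getName() + " = " + column.getValue());
        }
    }
}

The examples that follow apply the same execute()/get() pattern to slice, counter, multiget, and range-slices queries.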
Example #1
Source File: OpsCiStateDao.java    From oneops with Apache License 2.0
public Map<Long, Map<String, Long>> getComponentStates(List<Long> manifestIds) {

    Map<Long, Map<String, Long>> result = new HashMap<Long, Map<String, Long>>();
    MultigetSliceCounterQuery<Long, String> query =
        HFactory.createMultigetSliceCounterQuery(keyspace, longSerializer, stringSerializer);

    query.setKeys(manifestIds);
    query.setColumnFamily(SchemaBuilder.COMPONENT_STATE_CF);
    query.setRange(null, null, false, 1000);
    QueryResult<CounterRows<Long, String>> qResult = query.execute();

    CounterRows<Long, String> rows = qResult.get();

    for (CounterRow<Long, String> row : rows) {
        if (row.getColumnSlice().getColumns().size() > 0) {
            if (!result.containsKey(row.getKey())) {
                result.put(row.getKey(), new HashMap<String, Long>());
            }
            for (HCounterColumn<String> col : row.getColumnSlice().getColumns()) {
                result.get(row.getKey()).put(col.getName(), col.getValue());
            }
        }
    }

    return result;
}
 
Example #2
Source File: CpEntityManager.java    From usergrid with Apache License 2.0
@Override
public Map<String, Long> getEntityCounters( UUID entityId ) throws Exception {
    Map<String, Long> counters = new HashMap<String, Long>();
    Keyspace ko = cass.getApplicationKeyspace( applicationId );
    SliceCounterQuery<UUID, String> q = createCounterSliceQuery( ko, ue, se );
    q.setColumnFamily( ENTITY_COUNTERS.toString() );
    q.setRange( null, null, false, ALL_COUNT );
    //Adding graphite metrics
    Timer.Context timeEntityCounters = entGetEntityCountersTimer.time();
    QueryResult<CounterSlice<String>> r = q.setKey( entityId ).execute();
    timeEntityCounters.stop();
    for ( HCounterColumn<String> column : r.get().getColumns() ) {
        counters.put( column.getName(), column.getValue() );
    }
    return counters;
}
 
Example #3
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
@Override
public Stream<ArtifactMetadata> getArtifactStream( final RepositorySession session, final String repositoryId,
                                                   final QueryParameter queryParameter ) throws MetadataResolutionException
{
    RangeSlicesQuery<String, String, String> query = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName( ) ) //
        .setColumnNames( ArtifactMetadataModel.COLUMNS ); //

    query = query.addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId );

    QueryResult<OrderedRows<String, String, String>> result = query.execute();

    try
    {
        return StreamSupport.stream( createResultSpliterator( result, ( Row<String, String, String> row, ArtifactMetadata last ) ->
            mapArtifactMetadataStringColumnSlice( row.getKey( ), row.getColumnSlice( ) ) ), false )
            .skip( queryParameter.getOffset( ) ).limit( queryParameter.getLimit( ) );
    }
    catch ( MetadataRepositoryException e )
    {
        throw new MetadataResolutionException( e.getMessage( ), e );
    }
}
 
Example #4
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
@Override
public void removeFacetFromArtifact( RepositorySession session, final String repositoryId, final String namespace, final String project,
                                     final String version, final MetadataFacet metadataFacet )
    throws MetadataRepositoryException
{

    RangeSlicesQuery<String, String, String> query = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) //
        .setColumnNames( NAMESPACE_ID.toString() ); //

    query = query.addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
        .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
        .addEqualsExpression( PROJECT.toString(), project ) //
        .addEqualsExpression( VERSION.toString(), version );

    QueryResult<OrderedRows<String, String, String>> result = query.execute();

    for ( Row<String, String, String> row : result.get() )
    {
        this.artifactMetadataTemplate.deleteRow( row.getKey() );
    }
}
 
Example #5
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
@Override
public void removeMetadataFacet( RepositorySession session, final String repositoryId, final String facetId, final String name )
    throws MetadataRepositoryException
{

    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
        .setColumnNames( KEY.toString(), VALUE.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
        .addEqualsExpression( FACET_ID.toString(), facetId ) //
        .addEqualsExpression( NAME.toString(), name ) //
        .execute();

    for ( Row<String, String, String> row : result.get() )
    {
        this.metadataFacetTemplate.deleteRow( row.getKey() );
    }
}
 
Example #6
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
@Override
public void removeMetadataFacets( RepositorySession session, final String repositoryId, final String facetId )
    throws MetadataRepositoryException
{

    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
        .setColumnNames( KEY.toString(), VALUE.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
        .addEqualsExpression( FACET_ID.toString(), facetId ) //
        .execute();

    for ( Row<String, String, String> row : result.get() )
    {
        this.metadataFacetTemplate.deleteRow( row.getKey() );
    }

}
 
Example #7
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
@Override
public List<String> getMetadataFacets( RepositorySession session, final String repositoryId, final String facetId )
    throws MetadataRepositoryException
{

    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
        .setColumnNames( NAME.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
        .addEqualsExpression( FACET_ID.toString(), facetId ) //
        .execute();

    final List<String> facets = new ArrayList<>();

    for ( Row<String, String, String> row : result.get() )
    {
        facets.add( getStringValue( row.getColumnSlice(), NAME.toString() ) );
    }
    return facets;
}
 
Example #8
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
@Override
public List<String> getArtifactVersions( RepositorySession session, final String repoId, final String namespace, final String projectId,
                                         final String projectVersion )
    throws MetadataResolutionException
{

    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getProjectVersionMetadataFamilyName() ) //
        .setColumnNames( VERSION.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
        .addEqualsExpression( PROJECT_ID.toString(), projectId ) //
        .addEqualsExpression( PROJECT_VERSION.toString(), projectVersion ) //
        .execute();

    final Set<String> versions = new HashSet<>();

    for ( Row<String, String, String> row : result.get() )
    {
        versions.add( getStringValue( row.getColumnSlice(), VERSION.toString() ) );
    }

    return new ArrayList<>( versions );

}
 
Example #9
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
protected List<License> getLicenses( String projectVersionMetadataKey )
{
    List<License> licenses = new ArrayList<>();

    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
            .setColumnFamily( cassandraArchivaManager.getLicenseFamilyName() ) //
            .setColumnNames( "projectVersionMetadataModel.key" ) //
            .setRowCount( Integer.MAX_VALUE ) //
            .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
            .execute();

    for ( Row<String, String, String> row : result.get() )
    {
        ColumnFamilyResult<String, String> columnFamilyResult = this.licenseTemplate.queryColumns( row.getKey() );

        licenses.add(
            new License( columnFamilyResult.getString( NAME.toString() ), columnFamilyResult.getString( URL.toString() ) ) );
    }

    return licenses;
}
 
Example #10
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
protected void removeMailingList( String projectVersionMetadataKey )
{

    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
            .setColumnFamily( cassandraArchivaManager.getMailingListFamilyName() ) //
            .setColumnNames( NAME.toString() ) //
            .setRowCount( Integer.MAX_VALUE ) //
            .addEqualsExpression( "projectVersionMetadataModel.key", projectVersionMetadataKey ) //
            .execute();

    if ( result.get().getCount() < 1 )
    {
        return;
    }

    for ( Row<String, String, String> row : result.get() )
    {
        this.mailingListTemplate.deleteRow( row.getKey() );
    }

}
 
Example #11
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
protected Map<String, String> getChecksums( String artifactMetadataKey )
{
    Map<String, String> checksums = new HashMap<>();

    QueryResult<OrderedRows<String, String, String>> result =
            HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
                    .setColumnFamily( cassandraArchivaManager.getChecksumFamilyName() ) //
                    .setColumnNames( ARTIFACT_METADATA_MODEL_KEY, REPOSITORY_NAME.toString(),
                            CHECKSUM_ALG.toString(), CHECKSUM_VALUE.toString() ) //
                    .setRowCount( Integer.MAX_VALUE ) //
                    .addEqualsExpression(ARTIFACT_METADATA_MODEL_KEY, artifactMetadataKey) //
                    .execute();
    for ( Row<String, String, String> row : result.get() )
    {
        ColumnFamilyResult<String, String> columnFamilyResult =
                this.checksumTemplate.queryColumns( row.getKey() );

        checksums.put(columnFamilyResult.getString(CHECKSUM_ALG.toString()),
                columnFamilyResult.getString(CHECKSUM_VALUE.toString()));
    }

    return checksums;
}
 
Example #12
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
protected void removeChecksums( String artifactMetadataKey )
{

    QueryResult<OrderedRows<String, String, String>> result =
            HFactory.createRangeSlicesQuery( cassandraArchivaManager.getKeyspace(), ss, ss, ss ) //
                    .setColumnFamily( cassandraArchivaManager.getChecksumFamilyName() ) //
                    .setColumnNames( CHECKSUM_ALG.toString() ) //
                    .setRowCount( Integer.MAX_VALUE ) //
                    .addEqualsExpression(ARTIFACT_METADATA_MODEL_KEY, artifactMetadataKey ) //
                    .execute();

    if ( result.get().getCount() < 1 )
    {
        return;
    }

    for ( Row<String, String, String> row : result.get() )
    {
        this.checksumTemplate.deleteRow( row.getKey() );
    }

}
 
Example #13
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
@Override
public List<String> getProjects( RepositorySession session, final String repoId, final String namespace )
    throws MetadataResolutionException
{

    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getProjectFamilyName() ) //
        .setColumnNames( PROJECT_ID.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .addEqualsExpression( NAMESPACE_ID.toString(), namespace ) //
        .execute();

    final Set<String> projects = new HashSet<>( result.get( ).getCount( ) );

    for ( Row<String, String, String> row : result.get() )
    {
        projects.add( getStringValue( row.getColumnSlice(), PROJECT_ID.toString() ) );
    }

    return new ArrayList<>( projects );
}
 
Example #14
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
protected List<String> getNamespaces( final String repoId )
    throws MetadataResolutionException
{

    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) //
        .setColumnNames( NAME.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .execute();

    List<String> namespaces = new ArrayList<>( result.get().getCount() );

    for ( Row<String, String, String> row : result.get() )
    {
        namespaces.add( getStringValue( row.getColumnSlice(), NAME.toString() ) );
    }

    return namespaces;
}
 
Example #15
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
@Override
public List<String> getRootNamespaces( RepositorySession session, final String repoId )
    throws MetadataResolutionException
{

    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) //
        .setColumnNames( NAME.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .execute();

    Set<String> namespaces = new HashSet<>( result.get( ).getCount( ) );

    for ( Row<String, String, String> row : result.get() )
    {
        namespaces.add( StringUtils.substringBefore( getStringValue( row.getColumnSlice(), NAME.toString() ), "." ) );
    }

    return new ArrayList<>( namespaces );
}
 
Example #16
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
protected Namespace getNamespace( String repositoryId, String namespaceId )
{

    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) //
        .setColumnNames( REPOSITORY_NAME.toString(), NAME.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
        .addEqualsExpression( NAME.toString(), namespaceId ) //
        .execute();
    if ( result.get().getCount() > 0 )
    {
        ColumnSlice<String, String> columnSlice = result.get().getList().get( 0 ).getColumnSlice();
        return new Namespace( getStringValue( columnSlice, NAME.toString() ), //
                              new Repository( getStringValue( columnSlice, REPOSITORY_NAME.toString() ) ) );

    }
    return null;
}
 
Example #17
Source File: OpsCiStateDao.java    From oneops with Apache License 2.0
public List<CiChangeStateEvent> getCiStateHistory(long ciId, Long startTime, Long endTime, Integer count) {

    if (count == null) count = 1000;
    List<CiChangeStateEvent> states = new ArrayList<CiChangeStateEvent>();
    SliceQuery<Long, Long, String> sliceQuery = HFactory.createSliceQuery(keyspace, longSerializer, longSerializer, stringSerializer);
    sliceQuery.setColumnFamily(SchemaBuilder.CI_STATE_HIST_CF);
    sliceQuery.setRange(startTime, endTime, false, count);
    sliceQuery.setKey(ciId);
    QueryResult<ColumnSlice<Long, String>> result = sliceQuery.execute();
    ColumnSlice<Long, String> resultCols = result.get();
    for (HColumn<Long, String> col : resultCols.getColumns()) {
        CiChangeStateEvent event = gson.fromJson(col.getValue(), CiChangeStateEvent.class);
        states.add(event);
    }
    return states;
}
 
Example #18
Source File: OpsCiStateDao.java    From oneops with Apache License 2.0
public Map<String, Long> getComponentStates(Long manifestId) {

    Map<String, Long> result = new HashMap<String, Long>();
    SliceCounterQuery<Long, String> query = HFactory.createCounterSliceQuery(keyspace, longSerializer, stringSerializer);
    query.setKey(manifestId);
    query.setColumnFamily(SchemaBuilder.COMPONENT_STATE_CF);
    query.setRange(null, null, false, 100);
    QueryResult<CounterSlice<String>> qResult = query.execute();

    CounterSlice<String> row = qResult.get();
    if (row != null && row.getColumns().size() > 0) {
        for (HCounterColumn<String> col : row.getColumns()) {
            result.put(col.getName(), col.getValue());
        }
    }
    return result;
}
 
Example #19
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
@Override
public <T extends MetadataFacet> T getMetadataFacet( RepositorySession session, final String repositoryId, final Class<T> facetClazz, final String name )
    throws MetadataRepositoryException
{
    final MetadataFacetFactory<T> metadataFacetFactory = getFacetFactory( facetClazz );
    if (metadataFacetFactory==null) {
        return null;
    }
    final String facetId = metadataFacetFactory.getFacetId( );

    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getMetadataFacetFamilyName() ) //
        .setColumnNames( KEY.toString(), VALUE.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repositoryId ) //
        .addEqualsExpression( FACET_ID.toString(), facetId ) //
        .addEqualsExpression( NAME.toString(), name ) //
        .execute();

    T metadataFacet = metadataFacetFactory.createMetadataFacet( repositoryId, name );
    int size = result.get().getCount();
    if ( size < 1 )
    {
        return null;
    }
    Map<String, String> map = new HashMap<>( size );
    for ( Row<String, String, String> row : result.get() )
    {
        ColumnSlice<String, String> columnSlice = row.getColumnSlice();
        map.put( getStringValue( columnSlice, KEY.toString() ), getStringValue( columnSlice, VALUE.toString() ) );
    }
    metadataFacet.fromProperties( map );
    return metadataFacet;
}
 
Example #20
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
@Override
public List<String> getChildNamespaces( RepositorySession session, final String repoId, final String namespaceId )
    throws MetadataResolutionException
{

    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getNamespaceFamilyName() ) //
        .setColumnNames( NAME.toString() ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .execute();

    List<String> namespaces = new ArrayList<>( result.get().getCount() );

    for ( Row<String, String, String> row : result.get() )
    {
        String currentNamespace = getStringValue( row.getColumnSlice(), NAME.toString() );
        if ( StringUtils.startsWith( currentNamespace, namespaceId ) //
            && ( StringUtils.length( currentNamespace ) > StringUtils.length( namespaceId ) ) )
        {
            // store after namespaceId '.' but before next '.'
            // call org namespace org.apache.maven.shared -> stored apache

            String calledNamespace = StringUtils.endsWith( namespaceId, "." ) ? namespaceId : namespaceId + ".";
            String storedNamespace = StringUtils.substringAfter( currentNamespace, calledNamespace );

            storedNamespace = StringUtils.substringBefore( storedNamespace, "." );

            namespaces.add( storedNamespace );
        }
    }

    return namespaces;

}
 
Example #21
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
@Override
public List<ArtifactMetadata> getArtifactsByDateRange( RepositorySession session, final String repositoryId, final ZonedDateTime startTime,
                                                       final ZonedDateTime endTime, QueryParameter queryParameter )
    throws MetadataRepositoryException
{

    LongSerializer ls = LongSerializer.get();
    RangeSlicesQuery<String, String, Long> query = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ls ) //
        .setColumnFamily( cassandraArchivaManager.getArtifactMetadataFamilyName() ) //
        .setColumnNames( ArtifactMetadataModel.COLUMNS ); //


    if ( startTime != null )
    {
        query = query.addGteExpression( WHEN_GATHERED.toString(), startTime.toInstant().toEpochMilli() );
    }
    if ( endTime != null )
    {
        query = query.addLteExpression( WHEN_GATHERED.toString(), endTime.toInstant().toEpochMilli() );
    }
    QueryResult<OrderedRows<String, String, Long>> result = query.execute();

    List<ArtifactMetadata> artifactMetadatas = new ArrayList<>( result.get().getCount() );
    Iterator<Row<String, String, Long>> keyIter = result.get().iterator();
    if (keyIter.hasNext()) {
        String key = keyIter.next().getKey();
        for (Row<String, String, Long> row : result.get()) {
            ColumnSlice<String, Long> columnSlice = row.getColumnSlice();
            String repositoryName = getAsStringValue(columnSlice, REPOSITORY_NAME.toString());
            if (StringUtils.equals(repositoryName, repositoryId)) {

                artifactMetadatas.add(mapArtifactMetadataLongColumnSlice(key, columnSlice));
            }
        }
    }

    return artifactMetadatas;
}
 
Example #22
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
@Override
public List<ArtifactMetadata> getArtifactsByProjectVersionAttribute( RepositorySession session, String key, String value, String repositoryId )
    throws MetadataRepositoryException
{
    QueryResult<OrderedRows<String, String, String>> result =
        HFactory.createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getProjectVersionMetadataFamilyName() ) //
        .setColumnNames( PROJECT_ID.toString(), REPOSITORY_NAME.toString(), NAMESPACE_ID.toString(),
                         PROJECT_VERSION.toString() ) //
        .addEqualsExpression( key, value ) //
        .execute();

    int count = result.get().getCount();

    if ( count < 1 )
    {
        return Collections.emptyList();
    }

    List<ArtifactMetadata> artifacts = new LinkedList<>( );

    for ( Row<String, String, String> row : result.get() )
    {
        // TODO doing multiple queries, there should be a way to get all the artifactMetadatas for any number of
        // projects
        try
        {
            artifacts.addAll( getArtifacts( session,
                getStringValue( row.getColumnSlice(), REPOSITORY_NAME ),
                getStringValue( row.getColumnSlice(), NAMESPACE_ID ),
                getStringValue( row.getColumnSlice(), PROJECT_ID ), getStringValue( row.getColumnSlice(), PROJECT_VERSION ) ) );
        }
        catch ( MetadataResolutionException e )
        {
            // never raised
            throw new IllegalStateException( e );
        }
    }
    return artifacts;
}
 
Example #23
Source File: OpsCiStateDao.java    From oneops with Apache License 2.0
public Long getComponentStatesTimestamp(Long manifestIds) {
    CounterQuery<Long, String> query = HFactory.createCounterColumnQuery(keyspace, longSerializer, stringSerializer);
    query.setKey(manifestIds);
    query.setColumnFamily(SchemaBuilder.COMPONENT_STATE_CF);
    query.setName(COMPONENT_TIMESTAMP);
    QueryResult<HCounterColumn<String>> qResult = query.execute();

    HCounterColumn<String> col = qResult.get();

    if (col != null) {
        return col.getValue();
    } else {
        return null;
    }
}
 
Example #24
Source File: CassandraMetadataRepository.java    From archiva with Apache License 2.0
@Override
public List<ProjectVersionReference> getProjectReferences( RepositorySession session, String repoId, String namespace, String projectId,
                                                           String projectVersion )
    throws MetadataResolutionException
{
    QueryResult<OrderedRows<String, String, String>> result = HFactory //
        .createRangeSlicesQuery( keyspace, ss, ss, ss ) //
        .setColumnFamily( cassandraArchivaManager.getDependencyFamilyName() ) //
        .setColumnNames( "projectVersionMetadataModel.key" ) //
        .addEqualsExpression( REPOSITORY_NAME.toString(), repoId ) //
        .addEqualsExpression( GROUP_ID.toString(), namespace ) //
        .addEqualsExpression( ARTIFACT_ID.toString(), projectId ) //
        .addEqualsExpression( VERSION.toString(), projectVersion ) //
        .execute();

    List<String> dependenciesIds = new ArrayList<>( result.get().getCount() );

    for ( Row<String, String, String> row : result.get().getList() )
    {
        dependenciesIds.add( getStringValue( row.getColumnSlice(), "projectVersionMetadataModel.key" ) );
    }

    List<ProjectVersionReference> references = new ArrayList<>( result.get().getCount() );

    for ( String key : dependenciesIds )
    {
        ColumnFamilyResult<String, String> columnFamilyResult =
            this.projectVersionMetadataTemplate.queryColumns( key );
        references.add( new ProjectVersionReference( ProjectVersionReference.ReferenceType.DEPENDENCY, //
                                                     columnFamilyResult.getString( PROJECT_ID.toString() ), //
                                                     columnFamilyResult.getString( NAMESPACE_ID.toString() ), //
                                                     columnFamilyResult.getString( PROJECT_VERSION.toString() ) ) );
    }

    return references;
}
 
Example #25
Source File: QueueManagerImpl.java    From usergrid with Apache License 2.0
@Override
public Message getMessage( UUID messageId ) {
    SliceQuery<UUID, String, ByteBuffer> q =
            createSliceQuery( cass.getApplicationKeyspace( applicationId ), ue, se, be );
    q.setColumnFamily( MESSAGE_PROPERTIES.getColumnFamily() );
    q.setKey( messageId );
    q.setRange( null, null, false, ALL_COUNT );
    QueryResult<ColumnSlice<String, ByteBuffer>> r = q.execute();
    ColumnSlice<String, ByteBuffer> slice = r.get();
    List<HColumn<String, ByteBuffer>> results = slice.getColumns();
    return deserializeMessage( results );
}
 
Example #26
Source File: QueueManagerImpl.java    From usergrid with Apache License 2.0
public AggregateCounterSet getAggregateCounters( UUID queueId, String category, String counterName,
                                                 CounterResolution resolution, long start, long finish,
                                                 boolean pad ) {

    start = resolution.round( start );
    finish = resolution.round( finish );
    long expected_time = start;
    Keyspace ko = cass.getApplicationKeyspace( applicationId );
    SliceCounterQuery<String, Long> q = createCounterSliceQuery( ko, se, le );
    q.setColumnFamily( APPLICATION_AGGREGATE_COUNTERS.getColumnFamily() );
    q.setRange( start, finish, false, ALL_COUNT );
    QueryResult<CounterSlice<Long>> r = q.setKey(
            counterUtils.getAggregateCounterRow( counterName, null, null, queueId, category, resolution ) )
                                         .execute();
    List<AggregateCounter> counters = new ArrayList<AggregateCounter>();
    for ( HCounterColumn<Long> column : r.get().getColumns() ) {
        AggregateCounter count = new AggregateCounter( column.getName(), column.getValue() );
        if ( pad && !( resolution == CounterResolution.ALL ) ) {
            while ( count.getTimestamp() != expected_time ) {
                counters.add( new AggregateCounter( expected_time, 0 ) );
                expected_time = resolution.next( expected_time );
            }
            expected_time = resolution.next( expected_time );
        }
        counters.add( count );
    }
    if ( pad && !( resolution == CounterResolution.ALL ) ) {
        while ( expected_time <= finish ) {
            counters.add( new AggregateCounter( expected_time, 0 ) );
            expected_time = resolution.next( expected_time );
        }
    }
    return new AggregateCounterSet( counterName, queueId, category, counters );
}
 
Example #27
Source File: QueueManagerImpl.java    From usergrid with Apache License 2.0
public Map<String, Long> getQueueCounters( UUID queueId ) throws Exception {

    Map<String, Long> counters = new HashMap<String, Long>();
    Keyspace ko = cass.getApplicationKeyspace( applicationId );
    SliceCounterQuery<UUID, String> q = createCounterSliceQuery( ko, ue, se );
    q.setColumnFamily( COUNTERS.getColumnFamily() );
    q.setRange( null, null, false, ALL_COUNT );
    QueryResult<CounterSlice<String>> r = q.setKey( queueId ).execute();
    for ( HCounterColumn<String> column : r.get().getColumns() ) {
        counters.put( column.getName(), column.getValue() );
    }
    return counters;
}
 
Example #28
Source File: QueueManagerImpl.java    From usergrid with Apache License 2.0
public Queue getQueue( String queuePath, UUID queueId ) {
    SliceQuery<UUID, String, ByteBuffer> q =
            createSliceQuery( cass.getApplicationKeyspace( applicationId ), ue, se, be );
    q.setColumnFamily( QUEUE_PROPERTIES.getColumnFamily() );
    q.setKey( queueId );
    q.setRange( null, null, false, ALL_COUNT );
    QueryResult<ColumnSlice<String, ByteBuffer>> r = q.execute();
    ColumnSlice<String, ByteBuffer> slice = r.get();
    List<HColumn<String, ByteBuffer>> results = slice.getColumns();
    return deserializeQueue( results );
}
 
Example #29
Source File: CassandraService.java    From usergrid with Apache License 2.0
/**
 * Gets the columns.
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param key the key
 *
 * @return columns
 *
 * @throws Exception the exception
 */
public <N, V> List<HColumn<N, V>> getAllColumns( Keyspace ko, Object columnFamily, Object key,
                                                 Serializer<N> nameSerializer, Serializer<V> valueSerializer )
        throws Exception {

    if ( db_logger.isTraceEnabled() ) {
        db_logger.trace( "getColumns cf={} key={}", columnFamily, key );
    }

    SliceQuery<ByteBuffer, N, V> q = createSliceQuery( ko, be, nameSerializer, valueSerializer );
    q.setColumnFamily( columnFamily.toString() );
    q.setKey( bytebuffer( key ) );
    q.setRange( null, null, false, ALL_COUNT );
    QueryResult<ColumnSlice<N, V>> r = q.execute();
    ColumnSlice<N, V> slice = r.get();
    List<HColumn<N, V>> results = slice.getColumns();

    if ( db_logger.isTraceEnabled() ) {
        if ( results == null ) {
            db_logger.trace( "getColumns returned null" );
        }
        else {
            db_logger.trace( "getColumns returned {} columns", results.size() );
        }
    }

    return results;
}
 
Example #30
Source File: CassandraService.java    From usergrid with Apache License 2.0
/**
 * Gets the columns.
 *
 * @param ko the keyspace
 * @param columnFamily the column family
 * @param key the key
 * @param columnNames the column names
 *
 * @return columns
 *
 * @throws Exception the exception
 */
@SuppressWarnings("unchecked")
public <N, V> List<HColumn<N, V>> getColumns( Keyspace ko, Object columnFamily, Object key, Set<String> columnNames,
                                              Serializer<N> nameSerializer, Serializer<V> valueSerializer )
        throws Exception {

    if ( db_logger.isTraceEnabled() ) {
        db_logger.trace( "getColumns cf={} key={} names={}", columnFamily, key, columnNames );
    }

    SliceQuery<ByteBuffer, N, V> q = createSliceQuery( ko, be, nameSerializer, valueSerializer );
    q.setColumnFamily( columnFamily.toString() );
    q.setKey( bytebuffer( key ) );
    // q.setColumnNames(columnNames.toArray(new String[0]));
    q.setColumnNames( ( N[] ) nameSerializer.fromBytesSet( se.toBytesSet( new ArrayList<String>( columnNames ) ) )
                                            .toArray() );

    QueryResult<ColumnSlice<N, V>> r = q.execute();
    ColumnSlice<N, V> slice = r.get();
    List<HColumn<N, V>> results = slice.getColumns();

    if ( db_logger.isTraceEnabled() ) {
        if ( results == null ) {
            db_logger.trace( "getColumns returned null" );
        }
        else {
            db_logger.trace( "getColumns returned {} columns", results.size());
        }
    }

    return results;
}