Java Code Examples for io.prestosql.spi.predicate.TupleDomain

The following examples show how to use io.prestosql.spi.predicate.TupleDomain. They are extracted from open source projects, with the original project, source file, and license noted above each example.
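Before the project examples, here is a minimal, self-contained sketch of the core TupleDomain operations. It is an illustration, not Presto code: the String column key and the TupleDomainSketch class name are assumptions made for brevity, while withColumnDomains, all, intersect, isAll, and isNone are the SPI entry points the examples below exercise against real column handles.

import com.google.common.collect.ImmutableMap;
import io.prestosql.spi.predicate.Domain;
import io.prestosql.spi.predicate.TupleDomain;

import static io.prestosql.spi.type.BigintType.BIGINT;

public class TupleDomainSketch
{
    public static void main(String[] args)
    {
        // "orderkey = 42" expressed as a single-value domain, keyed here by a plain String column name
        TupleDomain<String> constraint = TupleDomain.withColumnDomains(
                ImmutableMap.of("orderkey", Domain.singleValue(BIGINT, 42L)));

        // all() is the neutral element of intersect(): the result still constrains orderkey
        TupleDomain<String> combined = TupleDomain.<String>all().intersect(constraint);
        System.out.println(combined.isAll()); // false

        // contradictory single values on the same column intersect to none()
        TupleDomain<String> conflicting = TupleDomain.withColumnDomains(
                ImmutableMap.of("orderkey", Domain.singleValue(BIGINT, 7L)));
        System.out.println(constraint.intersect(conflicting).isNone()); // true
    }
}

The connector and planner examples that follow use the same pattern, with ColumnHandle, HiveColumnHandle, Symbol, or DynamicFilterId as the key type instead of String.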
Example 1
Source Project: presto   Source File: TestParquetPredicateUtils.java    License: Apache License 2.0
@Test
public void testParquetTupleDomainMap()
{
    MapType mapType = new MapType(
            INTEGER,
            INTEGER,
            methodHandle(TestParquetPredicateUtils.class, "throwUnsupportedOperationException"),
            methodHandle(TestParquetPredicateUtils.class, "throwUnsupportedOperationException"),
            methodHandle(TestParquetPredicateUtils.class, "throwUnsupportedOperationException"),
            methodHandle(TestParquetPredicateUtils.class, "throwUnsupportedOperationException"));

    HiveColumnHandle columnHandle = createBaseColumn("my_map", 0, HiveType.valueOf("map<int,int>"), mapType, REGULAR, Optional.empty());

    TupleDomain<HiveColumnHandle> domain = withColumnDomains(ImmutableMap.of(columnHandle, Domain.notNull(mapType)));

    MessageType fileSchema = new MessageType("hive_schema",
            new GroupType(OPTIONAL, "my_map",
                    new GroupType(REPEATED, "map",
                            new PrimitiveType(REQUIRED, INT32, "key"),
                            new PrimitiveType(OPTIONAL, INT32, "value"))));

    Map<List<String>, RichColumnDescriptor> descriptorsByPath = getDescriptors(fileSchema, fileSchema);
    TupleDomain<ColumnDescriptor> tupleDomain = getParquetTupleDomain(descriptorsByPath, domain, fileSchema, true);
    assertTrue(tupleDomain.isAll());
}
 
Example 2
Source Project: presto   Source File: RaptorMetadata.java    License: Apache License 2.0
private RaptorTableHandle getTableHandle(SchemaTableName tableName)
{
    requireNonNull(tableName, "tableName is null");
    Table table = dao.getTableInformation(tableName.getSchemaName(), tableName.getTableName());
    if (table == null) {
        return null;
    }
    List<TableColumn> tableColumns = dao.listTableColumns(table.getTableId());
    checkArgument(!tableColumns.isEmpty(), "Table '%s' does not have any columns", tableName);

    return new RaptorTableHandle(
            tableName.getSchemaName(),
            tableName.getTableName(),
            table.getTableId(),
            table.getDistributionId(),
            table.getDistributionName(),
            table.getBucketCount(),
            table.isOrganized(),
            OptionalLong.empty(),
            TupleDomain.all(),
            table.getDistributionId().map(shardManager::getBucketAssignments),
            false);
}
 
Example 3
Source Project: presto   Source File: TestBackgroundHiveSplitLoader.java    License: Apache License 2.0
private static BackgroundHiveSplitLoader backgroundHiveSplitLoaderOfflinePartitions()
{
    ConnectorSession connectorSession = getHiveSession(new HiveConfig()
            .setMaxSplitSize(DataSize.of(1, GIGABYTE)));

    return new BackgroundHiveSplitLoader(
            SIMPLE_TABLE,
            createPartitionMetadataWithOfflinePartitions(),
            TupleDomain.all(),
            TupleDomain::all,
            TYPE_MANAGER,
            createBucketSplitInfo(Optional.empty(), Optional.empty()),
            connectorSession,
            new TestingHdfsEnvironment(TEST_FILES),
            new NamenodeStats(),
            new CachingDirectoryLister(new HiveConfig()),
            directExecutor(),
            2,
            false,
            false,
            Optional.empty());
}
 
Example 4
Source Project: presto   Source File: HivePartitionResult.java    License: Apache License 2.0
public HivePartitionResult(
        List<HiveColumnHandle> partitionColumns,
        Iterable<HivePartition> partitions,
        TupleDomain<HiveColumnHandle> compactEffectivePredicate,
        TupleDomain<ColumnHandle> unenforcedConstraint,
        TupleDomain<ColumnHandle> enforcedConstraint,
        Optional<HiveBucketHandle> bucketHandle,
        Optional<HiveBucketFilter> bucketFilter)
{
    this.partitionColumns = requireNonNull(partitionColumns, "partitionColumns is null");
    this.partitions = requireNonNull(partitions, "partitions is null");
    this.compactEffectivePredicate = requireNonNull(compactEffectivePredicate, "compactEffectivePredicate is null");
    this.unenforcedConstraint = requireNonNull(unenforcedConstraint, "unenforcedConstraint is null");
    this.enforcedConstraint = requireNonNull(enforcedConstraint, "enforcedConstraint is null");
    this.bucketHandle = requireNonNull(bucketHandle, "bucketHandle is null");
    this.bucketFilter = requireNonNull(bucketFilter, "bucketFilter is null");
}
 
Example 5
Source Project: presto   Source File: TestOrcPageSourceFactory.java    License: Apache License 2.0
private static void assertRead(Set<NationColumn> columns, OptionalLong nationKeyPredicate, Optional<AcidInfo> acidInfo, LongPredicate deletedRows)
        throws Exception
{
    TupleDomain<HiveColumnHandle> tupleDomain = TupleDomain.all();
    if (nationKeyPredicate.isPresent()) {
        tupleDomain = TupleDomain.withColumnDomains(ImmutableMap.of(toHiveColumnHandle(NATION_KEY), Domain.singleValue(BIGINT, nationKeyPredicate.getAsLong())));
    }

    List<Nation> actual = readFile(columns, tupleDomain, acidInfo);

    List<Nation> expected = new ArrayList<>();
    for (Nation nation : ImmutableList.copyOf(new NationGenerator().iterator())) {
        if (nationKeyPredicate.isPresent() && nationKeyPredicate.getAsLong() != nation.getNationKey()) {
            continue;
        }
        if (deletedRows.test(nation.getNationKey())) {
            continue;
        }
        expected.addAll(nCopies(1000, nation));
    }

    assertEqualsByColumns(columns, actual, expected);
}
 
Example 6
Source Project: presto   Source File: TestLocalDynamicFilterConsumer.java    License: Apache License 2.0
@Test
public void testMultipleColumns()
        throws ExecutionException, InterruptedException
{
    LocalDynamicFilterConsumer filter = new LocalDynamicFilterConsumer(
            ImmutableMultimap.of(new DynamicFilterId("123"), new Symbol("a"), new DynamicFilterId("456"), new Symbol("b")),
            ImmutableMap.of(new DynamicFilterId("123"), 0, new DynamicFilterId("456"), 1),
            ImmutableMap.of(new DynamicFilterId("123"), INTEGER, new DynamicFilterId("456"), INTEGER),
            1);
    assertEquals(filter.getBuildChannels(), ImmutableMap.of(new DynamicFilterId("123"), 0, new DynamicFilterId("456"), 1));
    Consumer<TupleDomain<DynamicFilterId>> consumer = filter.getTupleDomainConsumer();
    ListenableFuture<Map<Symbol, Domain>> result = filter.getNodeLocalDynamicFilterForSymbols();
    assertFalse(result.isDone());

    consumer.accept(TupleDomain.withColumnDomains(ImmutableMap.of(
            new DynamicFilterId("123"), Domain.singleValue(INTEGER, 10L),
            new DynamicFilterId("456"), Domain.singleValue(INTEGER, 20L))));
    assertEquals(result.get(), ImmutableMap.of(
            new Symbol("a"), Domain.singleValue(INTEGER, 10L),
            new Symbol("b"), Domain.singleValue(INTEGER, 20L)));
}
 
Example 7
Source Project: presto   Source File: TestJmxSplitManager.java    License: Apache License 2.0
@Test
public void testNoPredicate()
        throws Exception
{
    JmxTableHandle tableHandle = new JmxTableHandle(new SchemaTableName("schema", "tableName"), ImmutableList.of("objectName"), ImmutableList.of(columnHandle), true, TupleDomain.all());
    ConnectorSplitSource splitSource = splitManager.getSplits(JmxTransactionHandle.INSTANCE, SESSION, tableHandle, UNGROUPED_SCHEDULING);
    List<ConnectorSplit> allSplits = getAllSplits(splitSource);
    assertEquals(allSplits.size(), nodes.size());

    Set<String> actualNodes = nodes.stream().map(Node::getNodeIdentifier).collect(toSet());
    Set<String> expectedNodes = new HashSet<>();
    for (ConnectorSplit split : allSplits) {
        List<HostAddress> addresses = split.getAddresses();
        assertEquals(addresses.size(), 1);
        expectedNodes.add(addresses.get(0).getHostText());
    }
    assertEquals(actualNodes, expectedNodes);
}
 
Example 8
Source Project: presto   Source File: TestJmxMetadata.java    License: Apache License 2.0
@Test
public void testApplyFilterWithConstraint()
{
    JmxTableHandle handle = metadata.getTableHandle(SESSION, new SchemaTableName(JMX_SCHEMA_NAME, "java.lang:*"));

    JmxColumnHandle nodeColumnHandle = new JmxColumnHandle("node", createUnboundedVarcharType());
    NullableValue nodeColumnValue = NullableValue.of(createUnboundedVarcharType(), utf8Slice(localNode.getNodeIdentifier()));

    JmxColumnHandle objectNameColumnHandle = new JmxColumnHandle("object_name", createUnboundedVarcharType());
    NullableValue objectNameColumnValue = NullableValue.of(createUnboundedVarcharType(), utf8Slice("presto.memory:type=MemoryPool,name=reserved"));

    TupleDomain<ColumnHandle> tupleDomain = TupleDomain.fromFixedValues(ImmutableMap.of(nodeColumnHandle, nodeColumnValue, objectNameColumnHandle, objectNameColumnValue));

    Optional<ConstraintApplicationResult<ConnectorTableHandle>> result = metadata.applyFilter(SESSION, handle, new Constraint(tupleDomain));

    assertTrue(result.isPresent());
    assertEquals(result.get().getRemainingFilter(), TupleDomain.fromFixedValues(ImmutableMap.of(objectNameColumnHandle, objectNameColumnValue)));
    assertEquals(((JmxTableHandle) result.get().getHandle()).getNodeFilter(), TupleDomain.fromFixedValues(ImmutableMap.of(nodeColumnHandle, nodeColumnValue)));
}
 
Example 9
Source Project: presto   Source File: PinotMetadata.java    License: Apache License 2.0
@Override
public Optional<ConstraintApplicationResult<ConnectorTableHandle>> applyFilter(ConnectorSession session, ConnectorTableHandle table, Constraint constraint)
{
    PinotTableHandle handle = (PinotTableHandle) table;
    TupleDomain<ColumnHandle> oldDomain = handle.getConstraint();
    TupleDomain<ColumnHandle> newDomain = oldDomain.intersect(constraint.getSummary());
    if (oldDomain.equals(newDomain)) {
        return Optional.empty();
    }

    handle = new PinotTableHandle(
            handle.getSchemaName(),
            handle.getTableName(),
            newDomain,
            handle.getLimit(),
            handle.getQuery());
    return Optional.of(new ConstraintApplicationResult<>(handle, constraint.getSummary()));
}
 
Example 10
Source Project: presto   Source File: KuduMetadata.java    License: Apache License 2.0
@Override
public Optional<ConstraintApplicationResult<ConnectorTableHandle>> applyFilter(ConnectorSession session, ConnectorTableHandle table, Constraint constraint)
{
    KuduTableHandle handle = (KuduTableHandle) table;

    TupleDomain<ColumnHandle> oldDomain = handle.getConstraint();
    TupleDomain<ColumnHandle> newDomain = oldDomain.intersect(constraint.getSummary());
    if (oldDomain.equals(newDomain)) {
        return Optional.empty();
    }

    handle = new KuduTableHandle(
            handle.getSchemaTableName(),
            handle.getTable(clientSession),
            newDomain,
            handle.getDesiredColumns(),
            handle.isDeleteHandle());

    return Optional.of(new ConstraintApplicationResult<>(handle, constraint.getSummary()));
}
 
Example 11
Source Project: presto   Source File: TestDomainTranslator.java    License: Apache License 2.0
@Test
public void testRoundTrip()
{
    TupleDomain<Symbol> tupleDomain = withColumnDomains(ImmutableMap.<Symbol, Domain>builder()
            .put(C_BIGINT, Domain.singleValue(BIGINT, 1L))
            .put(C_DOUBLE, Domain.onlyNull(DOUBLE))
            .put(C_VARCHAR, Domain.notNull(VARCHAR))
            .put(C_BOOLEAN, Domain.singleValue(BOOLEAN, true))
            .put(C_BIGINT_1, Domain.singleValue(BIGINT, 2L))
            .put(C_DOUBLE_1, Domain.create(ValueSet.ofRanges(Range.lessThanOrEqual(DOUBLE, 1.1), Range.equal(DOUBLE, 2.0), Range.range(DOUBLE, 3.0, false, 3.5, true)), true))
            .put(C_VARCHAR_1, Domain.create(ValueSet.ofRanges(Range.lessThanOrEqual(VARCHAR, utf8Slice("2013-01-01")), Range.greaterThan(VARCHAR, utf8Slice("2013-10-01"))), false))
            .put(C_TIMESTAMP, Domain.singleValue(TIMESTAMP, TIMESTAMP_VALUE))
            .put(C_DATE, Domain.singleValue(DATE, DATE_VALUE))
            .put(C_COLOR, Domain.singleValue(COLOR, COLOR_VALUE_1))
            .put(C_HYPER_LOG_LOG, Domain.notNull(HYPER_LOG_LOG))
            .build());

    assertPredicateTranslates(toPredicate(tupleDomain), tupleDomain);
}
 
Example 12
Source Project: presto   Source File: LocalFileMetadata.java    License: Apache License 2.0
@Override
public Optional<ConstraintApplicationResult<ConnectorTableHandle>> applyFilter(ConnectorSession session, ConnectorTableHandle table, Constraint constraint)
{
    LocalFileTableHandle handle = (LocalFileTableHandle) table;

    TupleDomain<ColumnHandle> oldDomain = handle.getConstraint();
    TupleDomain<ColumnHandle> newDomain = oldDomain.intersect(constraint.getSummary());
    if (oldDomain.equals(newDomain)) {
        return Optional.empty();
    }

    handle = new LocalFileTableHandle(
            handle.getSchemaTableName(),
            handle.getTimestampColumn(),
            handle.getServerAddressColumn(),
            newDomain);

    return Optional.of(new ConstraintApplicationResult<>(handle, constraint.getSummary()));
}
 
Example 13
Source Project: presto   Source File: TestShardPredicate.java    License: Apache License 2.0
@Test
public void testMultipleColumnsMultipleRanges()
{
    TupleDomain<RaptorColumnHandle> tupleDomain = withColumnDomains(ImmutableMap.of(
            bucketNumberColumnHandle(),
            create(SortedRangeSet.copyOf(INTEGER, ImmutableList.of(equal(INTEGER, 1L), equal(INTEGER, 3L))), false),
            new RaptorColumnHandle("col", 1, INTEGER),
            create(SortedRangeSet.copyOf(INTEGER, ImmutableList.of(equal(INTEGER, 1L), equal(INTEGER, 3L))), false)));
    ShardPredicate shardPredicate = ShardPredicate.create(tupleDomain);
    assertEquals(
            shardPredicate.getPredicate(),
            "(((bucket_number >= ? OR bucket_number IS NULL) AND (bucket_number <= ? OR bucket_number IS NULL)) " +
                    "OR ((bucket_number >= ? OR bucket_number IS NULL) AND (bucket_number <= ? OR bucket_number IS NULL))) " +
                    "AND (((c1_max >= ? OR c1_max IS NULL) AND (c1_min <= ? OR c1_min IS NULL)) " +
                    "OR ((c1_max >= ? OR c1_max IS NULL) AND (c1_min <= ? OR c1_min IS NULL)))");
}
 
Example 14
Source Project: presto   Source File: TestJdbcRecordSetProvider.java    License: Apache License 2.0
private RecordCursor getCursor(JdbcTableHandle jdbcTableHandle, List<JdbcColumnHandle> columns, TupleDomain<ColumnHandle> domain)
{
    jdbcTableHandle = new JdbcTableHandle(
            jdbcTableHandle.getSchemaTableName(),
            jdbcTableHandle.getCatalogName(),
            jdbcTableHandle.getSchemaName(),
            jdbcTableHandle.getTableName(),
            domain,
            Optional.empty(),
            OptionalLong.empty(),
            Optional.empty());

    ConnectorSplitSource splits = jdbcClient.getSplits(SESSION, jdbcTableHandle);
    JdbcSplit split = (JdbcSplit) getOnlyElement(getFutureValue(splits.getNextBatch(NOT_PARTITIONED, 1000)).getSplits());

    ConnectorTransactionHandle transaction = new JdbcTransactionHandle();
    JdbcRecordSetProvider recordSetProvider = new JdbcRecordSetProvider(jdbcClient);
    RecordSet recordSet = recordSetProvider.getRecordSet(transaction, SESSION, split, jdbcTableHandle, columns);

    return recordSet.cursor();
}
 
Example 15
Source Project: presto   Source File: TestLocalDynamicFilterConsumer.java    License: Apache License 2.0
@Test
public void testCreateMultipleJoins()
        throws ExecutionException, InterruptedException
{
    SubPlan subplan = subplan(
            "SELECT count() FROM lineitem, orders, part " +
                    "WHERE lineitem.orderkey = orders.orderkey AND lineitem.partkey = part.partkey " +
                    "AND orders.custkey < 10 AND part.name = 'abc'",
            OPTIMIZED_AND_VALIDATED,
            false);

    List<JoinNode> joinNodes = searchJoins(subplan.getChildren().get(0).getFragment()).findAll();
    assertEquals(joinNodes.size(), 2);
    for (JoinNode joinNode : joinNodes) {
        LocalDynamicFilterConsumer filter = LocalDynamicFilterConsumer.create(joinNode, ImmutableList.copyOf(subplan.getFragment().getSymbols().values()), 1);
        DynamicFilterId filterId = getOnlyElement(filter.getBuildChannels().keySet());
        Symbol probeSymbol = getOnlyElement(joinNode.getCriteria()).getLeft();

        filter.getTupleDomainConsumer().accept(TupleDomain.withColumnDomains(ImmutableMap.of(
                filterId, Domain.singleValue(BIGINT, 6L))));
        assertEquals(filter.getNodeLocalDynamicFilterForSymbols().get(), ImmutableMap.of(
                probeSymbol, Domain.singleValue(BIGINT, 6L)));
    }
}
 
Example 16
Source Project: presto   Source File: DistributedExecutionPlanner.java    License: Apache License 2.0
private Map<PlanNodeId, SplitSource> visitScanAndFilter(TableScanNode node, Optional<FilterNode> filter)
{
    List<DynamicFilters.Descriptor> dynamicFilters = filter
            .map(FilterNode::getPredicate)
            .map(DynamicFilters::extractDynamicFilters)
            .map(DynamicFilters.ExtractResult::getDynamicConjuncts)
            .orElse(ImmutableList.of());

    Supplier<TupleDomain<ColumnHandle>> dynamicFilterSupplier = TupleDomain::all;
    if (!dynamicFilters.isEmpty()) {
        log.debug("Dynamic filters: %s", dynamicFilters);
        dynamicFilterSupplier = dynamicFilterService.createDynamicFilterSupplier(session.getQueryId(), dynamicFilters, node.getAssignments());
    }

    // get dataSource for table
    SplitSource splitSource = splitManager.getSplits(
            session,
            node.getTable(),
            stageExecutionDescriptor.isScanGroupedExecution(node.getId()) ? GROUPED_SCHEDULING : UNGROUPED_SCHEDULING,
            dynamicFilterSupplier);

    splitSources.add(splitSource);

    return ImmutableMap.of(node.getId(), splitSource);
}
 
Example 17
Source Project: presto   Source File: TestLocalDynamicFilterConsumer.java    License: Apache License 2.0
@Test
public void testMultipleProbeSymbols()
        throws ExecutionException, InterruptedException
{
    LocalDynamicFilterConsumer filter = new LocalDynamicFilterConsumer(
            ImmutableMultimap.of(new DynamicFilterId("123"), new Symbol("a1"), new DynamicFilterId("123"), new Symbol("a2")),
            ImmutableMap.of(new DynamicFilterId("123"), 0),
            ImmutableMap.of(new DynamicFilterId("123"), INTEGER),
            1);
    assertEquals(filter.getBuildChannels(), ImmutableMap.of(new DynamicFilterId("123"), 0));
    Consumer<TupleDomain<DynamicFilterId>> consumer = filter.getTupleDomainConsumer();
    ListenableFuture<Map<Symbol, Domain>> result = filter.getNodeLocalDynamicFilterForSymbols();
    assertFalse(result.isDone());

    consumer.accept(TupleDomain.withColumnDomains(ImmutableMap.of(
            new DynamicFilterId("123"), Domain.singleValue(INTEGER, 7L))));
    assertEquals(result.get(), ImmutableMap.of(
            new Symbol("a1"), Domain.singleValue(INTEGER, 7L),
            new Symbol("a2"), Domain.singleValue(INTEGER, 7L)));
}
 
Example 18
Source Project: presto   Source File: TestDomainConverter.java    License: Apache License 2.0
@Test
public void testBoolean()
{
    assertTupleDomainUnchanged(
            TupleDomain.withColumnDomains(
                    ImmutableMap.of(ICEBERG_COLUMN_PROVIDER.apply(BOOLEAN), Domain.singleValue(BOOLEAN, true))));

    assertTupleDomainUnchanged(
            TupleDomain.withColumnDomains(
                    ImmutableMap.of(ICEBERG_COLUMN_PROVIDER.apply(BOOLEAN), Domain.singleValue(BOOLEAN, false))));
}
 
Example 19
Source Project: presto   Source File: TestBackgroundHiveSplitLoader.java    License: Apache License 2.0
private static BackgroundHiveSplitLoader backgroundHiveSplitLoader(
        HdfsEnvironment hdfsEnvironment,
        TupleDomain<HiveColumnHandle> compactEffectivePredicate,
        Optional<HiveBucketFilter> hiveBucketFilter,
        Table table,
        Optional<HiveBucketHandle> bucketHandle,
        Optional<ValidWriteIdList> validWriteIds)
{
    List<HivePartitionMetadata> hivePartitionMetadatas =
            ImmutableList.of(
                    new HivePartitionMetadata(
                            new HivePartition(new SchemaTableName("testSchema", "table_name")),
                            Optional.empty(),
                            TableToPartitionMapping.empty()));

    return new BackgroundHiveSplitLoader(
            table,
            hivePartitionMetadatas,
            compactEffectivePredicate,
            TupleDomain::all,
            TYPE_MANAGER,
            createBucketSplitInfo(bucketHandle, hiveBucketFilter),
            SESSION,
            hdfsEnvironment,
            new NamenodeStats(),
            new CachingDirectoryLister(new HiveConfig()),
            EXECUTOR,
            2,
            false,
            false,
            validWriteIds);
}
 
Example 20
Source Project: presto   Source File: TestBackgroundHiveSplitLoader.java    License: Apache License 2.0
private static BackgroundHiveSplitLoader backgroundHiveSplitLoader(
        List<LocatedFileStatus> files,
        TupleDomain<HiveColumnHandle> tupleDomain)
{
    return backgroundHiveSplitLoader(
            files,
            tupleDomain,
            Optional.empty(),
            SIMPLE_TABLE,
            Optional.empty());
}
 
Example 21
Source Project: presto   Source File: HiveMetadata.java    License: Apache License 2.0
@VisibleForTesting
static TupleDomain<ColumnHandle> createPredicate(List<ColumnHandle> partitionColumns, List<HivePartition> partitions)
{
    if (partitions.isEmpty()) {
        return TupleDomain.none();
    }

    return withColumnDomains(
            partitionColumns.stream()
                    .collect(toMap(identity(), column -> buildColumnDomain(column, partitions))));
}
 
Example 22
Source Project: presto   Source File: NodesSystemTable.java    License: Apache License 2.0
@Override
public ConnectorPageSource pageSource(ConnectorTransactionHandle transaction, ConnectorSession session, TupleDomain<Integer> constraint)
{
    Set<ElasticsearchNode> nodes = client.getNodes();

    BlockBuilder nodeId = VARCHAR.createBlockBuilder(null, nodes.size());
    BlockBuilder prestoAddress = VARCHAR.createBlockBuilder(null, nodes.size());
    BlockBuilder elasticsearchNodeId = VARCHAR.createBlockBuilder(null, nodes.size());
    BlockBuilder elasticsearchAddress = VARCHAR.createBlockBuilder(null, nodes.size());

    for (ElasticsearchNode node : nodes) {
        VARCHAR.writeString(nodeId, currentNode.getNodeIdentifier());
        VARCHAR.writeString(prestoAddress, currentNode.getHostAndPort().toString());
        VARCHAR.writeString(elasticsearchNodeId, node.getId());

        if (node.getAddress().isPresent()) {
            VARCHAR.writeString(elasticsearchAddress, node.getAddress().get());
        }
        else {
            elasticsearchAddress.appendNull();
        }
    }

    return new FixedPageSource(ImmutableList.of(new Page(
            nodeId.build(),
            prestoAddress.build(),
            elasticsearchNodeId.build(),
            elasticsearchAddress.build())));
}
 
Example 23
Source Project: presto   Source File: ClassLoaderSafeConnectorMetadata.java    License: Apache License 2.0
@Override
public Optional<ConnectorResolvedIndex> resolveIndex(ConnectorSession session, ConnectorTableHandle tableHandle, Set<ColumnHandle> indexableColumns, Set<ColumnHandle> outputColumns, TupleDomain<ColumnHandle> tupleDomain)
{
    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(classLoader)) {
        return delegate.resolveIndex(session, tableHandle, indexableColumns, outputColumns, tupleDomain);
    }
}
 
Example 24
Source Project: presto   Source File: TableScanWorkProcessorOperator.java    License: Apache License 2.0
SplitToPages(
        Session session,
        PageSourceProvider pageSourceProvider,
        TableHandle table,
        Iterable<ColumnHandle> columns,
        Supplier<TupleDomain<ColumnHandle>> dynamicFilter,
        AggregatedMemoryContext aggregatedMemoryContext)
{
    this.session = requireNonNull(session, "session is null");
    this.pageSourceProvider = requireNonNull(pageSourceProvider, "pageSourceProvider is null");
    this.table = requireNonNull(table, "table is null");
    this.columns = ImmutableList.copyOf(requireNonNull(columns, "columns is null"));
    this.dynamicFilter = requireNonNull(dynamicFilter, "dynamicFilter is null");
    this.aggregatedMemoryContext = requireNonNull(aggregatedMemoryContext, "aggregatedMemoryContext is null");
}
 
Example 25
Source Project: presto   Source File: LocalFileRecordCursor.java    License: Apache License 2.0
private static FilesReader getFilesReader(LocalFileTables localFileTables, TupleDomain<LocalFileColumnHandle> predicate, SchemaTableName tableName)
{
    LocalFileTableHandle table = localFileTables.getTable(tableName);
    List<File> fileNames = localFileTables.getFiles(tableName);
    try {
        return new FilesReader(table.getTimestampColumn(), fileNames.iterator(), predicate);
    }
    catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
 
Example 26
Source Project: presto   Source File: AbstractTestHiveFileSystem.java    License: Apache License 2.0
protected MaterializedResult readTable(SchemaTableName tableName)
        throws IOException
{
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorSession session = newSession();

        ConnectorTableHandle table = getTableHandle(metadata, tableName);
        List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(session, table).values());

        metadata.beginQuery(session);
        ConnectorSplitSource splitSource = splitManager.getSplits(transaction.getTransactionHandle(), session, table, UNGROUPED_SCHEDULING);

        List<Type> allTypes = getTypes(columnHandles);
        List<Type> dataTypes = getTypes(columnHandles.stream()
                .filter(columnHandle -> !((HiveColumnHandle) columnHandle).isHidden())
                .collect(toImmutableList()));
        MaterializedResult.Builder result = MaterializedResult.resultBuilder(session, dataTypes);

        List<ConnectorSplit> splits = getAllSplits(splitSource);
        for (ConnectorSplit split : splits) {
            try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(transaction.getTransactionHandle(), session, split, table, columnHandles, TupleDomain.all())) {
                MaterializedResult pageSourceResult = materializeSourceDataStream(session, pageSource, allTypes);
                for (MaterializedRow row : pageSourceResult.getMaterializedRows()) {
                    Object[] dataValues = IntStream.range(0, row.getFieldCount())
                            .filter(channel -> !((HiveColumnHandle) columnHandles.get(channel)).isHidden())
                            .mapToObj(row::getField)
                            .toArray();
                    result.row(dataValues);
                }
            }
        }

        metadata.cleanupQuery(session);
        return result.build();
    }
}
 
Example 27
Source Project: presto   Source File: TestDomainTranslator.java    License: Apache License 2.0
@Test
public void testNoneRoundTrip()
{
    TupleDomain<Symbol> tupleDomain = TupleDomain.none();
    ExtractionResult result = fromPredicate(toPredicate(tupleDomain));
    assertEquals(result.getRemainingExpression(), TRUE_LITERAL);
    assertEquals(result.getTupleDomain(), tupleDomain);
}
 
Example 28
Source Project: presto   Source File: AccumuloTableHandle.java    License: Apache License 2.0
public AccumuloTableHandle(
        String schema,
        String table,
        String rowId,
        boolean external,
        String serializerClassName,
        Optional<String> scanAuthorizations)
{
    this(schema, table, rowId, TupleDomain.all(), external, serializerClassName, scanAuthorizations);
}
 
Example 29
Source Project: presto   Source File: ElasticsearchPageSourceProvider.java    License: Apache License 2.0
@Override
public ConnectorPageSource createPageSource(
        ConnectorTransactionHandle transaction,
        ConnectorSession session,
        ConnectorSplit split,
        ConnectorTableHandle table,
        List<ColumnHandle> columns,
        TupleDomain<ColumnHandle> dynamicFilter)
{
    requireNonNull(split, "split is null");
    requireNonNull(table, "table is null");

    ElasticsearchTableHandle elasticsearchTable = (ElasticsearchTableHandle) table;
    ElasticsearchSplit elasticsearchSplit = (ElasticsearchSplit) split;

    if (elasticsearchTable.getType().equals(QUERY)) {
        return new PassthroughQueryPageSource(client, elasticsearchTable);
    }

    if (columns.isEmpty()) {
        return new CountQueryPageSource(client, session, elasticsearchTable, elasticsearchSplit);
    }

    return new ScanQueryPageSource(
            client,
            session,
            elasticsearchTable,
            elasticsearchSplit,
            columns.stream()
                    .map(ElasticsearchColumnHandle.class::cast)
                    .collect(toImmutableList()));
}
 
Example 30
Source Project: presto   Source File: TestDynamicFilterSourceOperator.java    License: Apache License 2.0
@Test
public void testCollectTooMuchRows()
{
    final int maxRowCount = getDynamicFilteringMaxPerDriverRowCount(pipelineContext.getSession());
    Page largePage = createSequencePage(ImmutableList.of(BIGINT), maxRowCount + 1);

    OperatorFactory operatorFactory = createOperatorFactory(channel(0, BIGINT));
    verifyPassthrough(createOperator(operatorFactory),
            ImmutableList.of(BIGINT),
            largePage);
    operatorFactory.noMoreOperators();
    assertEquals(partitions.build(), ImmutableList.of(TupleDomain.all()));
}