Java Code Examples for org.apache.phoenix.util.SchemaUtil#getTableNameAsBytes()

The following examples show how to use org.apache.phoenix.util.SchemaUtil#getTableNameAsBytes(). All of them are taken from the Apache Phoenix project; the originating source file is listed above each example.
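Before the examples, here is a minimal illustrative sketch of the call itself. It is not taken from the project; the inputs are hypothetical, and it assumes org.apache.phoenix.util.SchemaUtil and org.apache.hadoop.hbase.util.Bytes on the classpath with the behavior described in the comments.

// Illustrative sketch (assumed behavior): getTableNameAsBytes joins an optional
// schema name and a table name into the byte[] form of the full table name,
// which Phoenix also uses as the name of the backing HBase table.
byte[] qualified = SchemaUtil.getTableNameAsBytes("MY_SCHEMA", "MY_TABLE"); // "MY_SCHEMA.MY_TABLE"
byte[] unqualified = SchemaUtil.getTableNameAsBytes("", "MY_TABLE");        // just "MY_TABLE"

// A byte[] overload is used when the parts have already been split out of a
// metadata row key (see Example 1 below).
byte[] fromKeyParts = SchemaUtil.getTableNameAsBytes(
        Bytes.toBytes("MY_SCHEMA"), Bytes.toBytes("MY_TABLE"));
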
Example 1
Source File: ConnectionlessQueryServicesImpl.java    From phoenix with Apache License 2.0
private static byte[] getTableName(List<Mutation> tableMetaData, byte[] physicalTableName) {
    if (physicalTableName != null) {
        return physicalTableName;
    }
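    // No physical name was supplied, so derive it from the table header row key:
    // split the key into its (tenant id, schema name, table name) parts and
    // rebuild the full table name from the schema and table bytes.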
    byte[][] rowKeyMetadata = new byte[3][];
    Mutation m = MetaDataUtil.getTableHeaderRow(tableMetaData);
    byte[] key = m.getRow();
    SchemaUtil.getVarChars(key, rowKeyMetadata);
    byte[] schemaBytes = rowKeyMetadata[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX];
    byte[] tableBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TABLE_NAME_INDEX];
    return SchemaUtil.getTableNameAsBytes(schemaBytes, tableBytes);
}
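
For completeness, SchemaUtil also offers the reverse direction. The following is a brief sketch, assuming getSchemaNameFromFullName and getTableNameFromFullName split on the '.' separator that getTableNameAsBytes inserts:

// Illustrative sketch (assumed usage): split a full name back into its parts.
String fullName = Bytes.toString(SchemaUtil.getTableNameAsBytes("MY_SCHEMA", "MY_TABLE"));
String schemaName = SchemaUtil.getSchemaNameFromFullName(fullName); // "MY_SCHEMA"
String tableName = SchemaUtil.getTableNameFromFullName(fullName);   // "MY_TABLE"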
 
Example 2
Source File: FromCompiler.java    From phoenix with Apache License 2.0
public static ColumnResolver getResolverForCreation(final CreateTableStatement statement, final PhoenixConnection connection)
        throws SQLException {
    TableName baseTable = statement.getBaseTableName();
    if (baseTable == null) {
        return EMPTY_TABLE_RESOLVER;
    }
    NamedTableNode tableNode = NamedTableNode.create(null, baseTable, Collections.<ColumnDef>emptyList());
    // Always use non-tenant-specific connection here
    try {
        SingleTableColumnResolver visitor = new SingleTableColumnResolver(connection, tableNode, true);
        return visitor;
    } catch (TableNotFoundException e) {
        // Used for mapped VIEW, since we won't be able to resolve that.
        // Instead, we create a table with just the dynamic columns.
        // A tenant-specific connection may not create a mapped VIEW.
        if (connection.getTenantId() == null && statement.getTableType() == PTableType.VIEW) {
            ConnectionQueryServices services = connection.getQueryServices();
            byte[] fullTableName = SchemaUtil.getTableNameAsBytes(baseTable.getSchemaName(), baseTable.getTableName());
            HTableInterface htable = null;
            try {
                htable = services.getTable(fullTableName);
            } catch (UnsupportedOperationException ignore) {
                throw e; // For Connectionless
            } finally {
                if (htable != null) Closeables.closeQuietly(htable);
            }
            tableNode = NamedTableNode.create(null, baseTable, statement.getColumnDefs());
            return new SingleTableColumnResolver(connection, tableNode, e.getTimeStamp());
        }
        throw e;
    }
}
 
Example 3
Source File: DropMetadataIT.java    From phoenix with Apache License 2.0
@Test
public void testDropViewKeepsHTable() throws Exception {
    Connection conn = getConnection();
    Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin();
    String hbaseNativeViewName = generateUniqueName();

    byte[] hbaseNativeBytes = SchemaUtil.getTableNameAsBytes(HBASE_NATIVE_SCHEMA_NAME, hbaseNativeViewName);
    try {
        TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(hbaseNativeBytes));
        ColumnFamilyDescriptor columnDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_NAME)
                .setKeepDeletedCells(KeepDeletedCells.TRUE).build();
        builder.addColumnFamily(columnDescriptor);
        admin.createTable(builder.build());
    } finally {
        admin.close();
    }
    
    conn.createStatement().execute("create view " + hbaseNativeViewName+
            "   (uint_key unsigned_int not null," +
            "    ulong_key unsigned_long not null," +
            "    string_key varchar not null,\n" +
            "    \"1\".uint_col unsigned_int," +
            "    \"1\".ulong_col unsigned_long" +
            "    CONSTRAINT pk PRIMARY KEY (uint_key, ulong_key, string_key))\n" +
            ColumnFamilyDescriptorBuilder.DATA_BLOCK_ENCODING + "='" + DataBlockEncoding.NONE + "'");
    conn.createStatement().execute("drop view " + hbaseNativeViewName);
    conn.close();
}
 
Example 4
Source File: ConnectionQueryServicesImpl.java    From phoenix with Apache License 2.0
@Override
public MetaDataMutationResult dropTable(final List<Mutation> tableMetaData, final PTableType tableType, final boolean cascade) throws SQLException {
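    // The row key of the first metadata mutation encodes (tenant id, schema, table);
    // rebuild the SYSTEM.CATALOG table key from it to route the coprocessor call.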
    byte[][] rowKeyMetadata = new byte[3][];
    SchemaUtil.getVarChars(tableMetaData.get(0).getRow(), rowKeyMetadata);
    byte[] tenantIdBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TENANT_ID_INDEX];
    byte[] schemaBytes = rowKeyMetadata[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX];
    byte[] tableBytes = rowKeyMetadata[PhoenixDatabaseMetaData.TABLE_NAME_INDEX];
    byte[] tableKey = SchemaUtil.getTableKey(tenantIdBytes == null ? ByteUtil.EMPTY_BYTE_ARRAY : tenantIdBytes, schemaBytes, tableBytes);
    final MetaDataMutationResult result =  metaDataCoprocessorExec(tableKey,
            new Batch.Call<MetaDataService, MetaDataResponse>() {
                @Override
                public MetaDataResponse call(MetaDataService instance) throws IOException {
                    ServerRpcController controller = new ServerRpcController();
                    BlockingRpcCallback<MetaDataResponse> rpcCallback =
                            new BlockingRpcCallback<MetaDataResponse>();
                    DropTableRequest.Builder builder = DropTableRequest.newBuilder();
                    for (Mutation m : tableMetaData) {
                        MutationProto mp = ProtobufUtil.toProto(m);
                        builder.addTableMetadataMutations(mp.toByteString());
                    }
                    builder.setTableType(tableType.getSerializedValue());
                    builder.setCascade(cascade);

                    instance.dropTable(controller, builder.build(), rpcCallback);
                    if(controller.getFailedOn() != null) {
                        throw controller.getFailedOn();
                    }
                    return rpcCallback.get();
                }
            });

    final MutationCode code = result.getMutationCode();
    switch(code) {
    case TABLE_ALREADY_EXISTS:
        ReadOnlyProps props = this.getProps();
        boolean dropMetadata = props.getBoolean(DROP_METADATA_ATTRIB, DEFAULT_DROP_METADATA);
        if (dropMetadata) {
            dropTables(result.getTableNamesToDelete());
        }
        invalidateTables(result.getTableNamesToDelete());
        if (tableType == PTableType.TABLE) {
            byte[] physicalName = SchemaUtil.getTableNameAsBytes(schemaBytes, tableBytes);
            long timestamp = MetaDataUtil.getClientTimeStamp(tableMetaData);
            ensureViewIndexTableDropped(physicalName, timestamp);
            ensureLocalIndexTableDropped(physicalName, timestamp);
            tableStatsCache.invalidate(new ImmutableBytesPtr(physicalName));
        }
        break;
    default:
        break;
    }
    return result;
}
 
Example 5
Source File: QueryDatabaseMetaDataIT.java    From phoenix with Apache License 2.0
@Test
public void testCreateOnExistingTable() throws Exception {
    PhoenixConnection pconn = DriverManager.getConnection(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES)).unwrap(PhoenixConnection.class);
    String tableName = MDTEST_NAME;
    String schemaName = MDTEST_SCHEMA_NAME;
    byte[] cfA = Bytes.toBytes(SchemaUtil.normalizeIdentifier("a"));
    byte[] cfB = Bytes.toBytes(SchemaUtil.normalizeIdentifier("b"));
    byte[] cfC = Bytes.toBytes("c");
    byte[][] familyNames = new byte[][] {cfB, cfC};
    byte[] htableName = SchemaUtil.getTableNameAsBytes(schemaName, tableName);
    HBaseAdmin admin = pconn.getQueryServices().getAdmin();
    try {
        admin.disableTable(htableName);
        admin.deleteTable(htableName);
        admin.enableTable(htableName);
    } catch (org.apache.hadoop.hbase.TableNotFoundException e) {
    }
    
    @SuppressWarnings("deprecation")
    HTableDescriptor descriptor = new HTableDescriptor(htableName);
    for (byte[] familyName : familyNames) {
        HColumnDescriptor columnDescriptor = new HColumnDescriptor(familyName);
        descriptor.addFamily(columnDescriptor);
    }
    admin.createTable(descriptor);
        
    long ts = nextTimestamp();
    Properties props = new Properties();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    PhoenixConnection conn1 = DriverManager.getConnection(getUrl(), props).unwrap(PhoenixConnection.class);
    ensureTableCreated(getUrl(), tableName, null, ts);
    
    descriptor = admin.getTableDescriptor(htableName);
    assertEquals(3,descriptor.getColumnFamilies().length);
    HColumnDescriptor cdA = descriptor.getFamily(cfA);
    assertNotEquals(HColumnDescriptor.DEFAULT_KEEP_DELETED, cdA.getKeepDeletedCellsAsEnum());
    assertEquals(DataBlockEncoding.NONE, cdA.getDataBlockEncoding()); // Overridden using WITH
    assertEquals(1,cdA.getMaxVersions());// Overridden using WITH
    HColumnDescriptor cdB = descriptor.getFamily(cfB);
    // Allow KEEP_DELETED_CELLS to be false for VIEW
    assertEquals(HColumnDescriptor.DEFAULT_KEEP_DELETED, cdB.getKeepDeletedCellsAsEnum());
    assertEquals(DataBlockEncoding.NONE, cdB.getDataBlockEncoding()); // Should keep the original value.
    // CF c should stay the same since it's not a Phoenix cf.
    HColumnDescriptor cdC = descriptor.getFamily(cfC);
    assertNotNull("Column family not found", cdC);
    assertEquals(HColumnDescriptor.DEFAULT_KEEP_DELETED, cdC.getKeepDeletedCellsAsEnum());
    assertFalse(SchemaUtil.DEFAULT_DATA_BLOCK_ENCODING == cdC.getDataBlockEncoding());
    assertTrue(descriptor.hasCoprocessor(UngroupedAggregateRegionObserver.class.getName()));
    assertTrue(descriptor.hasCoprocessor(GroupedAggregateRegionObserver.class.getName()));
    assertTrue(descriptor.hasCoprocessor(ServerCachingEndpointImpl.class.getName()));
    admin.close();
     
    int rowCount = 5;
    String upsert = "UPSERT INTO " + tableName + "(id,col1,col2) VALUES(?,?,?)";
    PreparedStatement ps = conn1.prepareStatement(upsert);
    for (int i = 0; i < rowCount; i++) {
        ps.setString(1, Integer.toString(i));
        ps.setInt(2, i+1);
        ps.setInt(3, i+2);
        ps.execute();
    }
    conn1.commit();
    conn1.close();
    
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 6));
    Connection conn2 = DriverManager.getConnection(getUrl(), props);
    String query = "SELECT count(1) FROM " + tableName;
    ResultSet rs = conn2.createStatement().executeQuery(query);
    assertTrue(rs.next());
    assertEquals(rowCount, rs.getLong(1));
    
    query = "SELECT id, col1,col2 FROM " + tableName;
    rs = conn2.createStatement().executeQuery(query);
    for (int i = 0; i < rowCount; i++) {
        assertTrue(rs.next());
        assertEquals(Integer.toString(i),rs.getString(1));
        assertEquals(i+1, rs.getInt(2));
        assertEquals(i+2, rs.getInt(3));
    }
    assertFalse(rs.next());
    conn2.close();
}
 
Example 6
Source File: QueryDatabaseMetaDataIT.java    From phoenix with Apache License 2.0
@Test
public void testCreateOnExistingTable() throws Exception {
    try (PhoenixConnection pconn =
            DriverManager.getConnection(getUrl()).unwrap(PhoenixConnection.class)) {
        String tableName = generateUniqueName();// MDTEST_NAME;
        String schemaName = "";// MDTEST_SCHEMA_NAME;
        byte[] cfA = Bytes.toBytes(SchemaUtil.normalizeIdentifier("a"));
        byte[] cfB = Bytes.toBytes(SchemaUtil.normalizeIdentifier("b"));
        byte[] cfC = Bytes.toBytes("c");
        byte[][] familyNames = new byte[][] { cfB, cfC };
        byte[] htableName = SchemaUtil.getTableNameAsBytes(schemaName, tableName);
        Admin admin = pconn.getQueryServices().getAdmin();
        try {
            admin.disableTable(TableName.valueOf(htableName));
            admin.deleteTable(TableName.valueOf(htableName));
            admin.enableTable(TableName.valueOf(htableName));
        } catch (org.apache.hadoop.hbase.TableNotFoundException e) {
        }

        TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(htableName));
        for (byte[] familyName : familyNames) {
            builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(familyName));
        }
        admin.createTable(builder.build());
        createMDTestTable(pconn, tableName,
            "a." + ColumnFamilyDescriptorBuilder.BLOCKSIZE+ "=" + 50000);

        TableDescriptor descriptor = admin.getDescriptor(TableName.valueOf(htableName));
        assertEquals(3, descriptor.getColumnFamilies().length);
        ColumnFamilyDescriptor cdA = descriptor.getColumnFamily(cfA);
        assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED, cdA.getKeepDeletedCells());
        assertNotEquals(ColumnFamilyDescriptorBuilder.DEFAULT_BLOCKSIZE, cdA.getBlocksize());
        assertEquals(DataBlockEncoding.NONE, cdA.getDataBlockEncoding()); // Overridden using WITH
        assertEquals(1, cdA.getMaxVersions()); // Overridden using WITH
        ColumnFamilyDescriptor cdB = descriptor.getColumnFamily(cfB);
        // Allow KEEP_DELETED_CELLS to be false for VIEW
        assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED, cdB.getKeepDeletedCells());
        assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_BLOCKSIZE, cdB.getBlocksize());
        assertEquals(DataBlockEncoding.NONE, cdB.getDataBlockEncoding()); // Should keep the original value.
        // CF c should stay the same since it's not a Phoenix cf.
        ColumnFamilyDescriptor cdC = descriptor.getColumnFamily(cfC);
        assertNotNull("Column family not found", cdC);
        assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED, cdC.getKeepDeletedCells());
        assertEquals(ColumnFamilyDescriptorBuilder.DEFAULT_BLOCKSIZE, cdC.getBlocksize());
        assertFalse(SchemaUtil.DEFAULT_DATA_BLOCK_ENCODING == cdC.getDataBlockEncoding());
        assertTrue(descriptor.hasCoprocessor(UngroupedAggregateRegionObserver.class.getName()));
        assertTrue(descriptor.hasCoprocessor(GroupedAggregateRegionObserver.class.getName()));
        assertTrue(descriptor.hasCoprocessor(ServerCachingEndpointImpl.class.getName()));
        admin.close();

        int rowCount = 5;
        String upsert = "UPSERT INTO " + tableName + "(id,col1,col2) VALUES(?,?,?)";
        PreparedStatement ps = pconn.prepareStatement(upsert);
        for (int i = 0; i < rowCount; i++) {
            ps.setString(1, Integer.toString(i));
            ps.setInt(2, i + 1);
            ps.setInt(3, i + 2);
            ps.execute();
        }
        pconn.commit();
        String query = "SELECT count(1) FROM " + tableName;
        ResultSet rs = pconn.createStatement().executeQuery(query);
        assertTrue(rs.next());
        assertEquals(rowCount, rs.getLong(1));

        query = "SELECT id, col1,col2 FROM " + tableName;
        rs = pconn.createStatement().executeQuery(query);
        for (int i = 0; i < rowCount; i++) {
            assertTrue(rs.next());
            assertEquals(Integer.toString(i), rs.getString(1));
            assertEquals(i + 1, rs.getInt(2));
            assertEquals(i + 2, rs.getInt(3));
        }
        assertFalse(rs.next());
    }
}