org.apache.phoenix.schema.PColumn Java Examples

The following examples show how to use org.apache.phoenix.schema.PColumn. All examples are taken from the Apache Phoenix project; the source file each one comes from is listed above the code.
Example #1
Source File: IndexColumnNames.java    From phoenix with Apache License 2.0
private String getDataTypeString(PColumn col) {
    PDataType<?> dataType = col.getDataType();
    switch (dataType.getSqlType()) {
    case Types.DECIMAL:
        String typeStr = dataType.toString();
        if (col.getMaxLength() != null) {
            typeStr += "(" + col.getMaxLength().toString();
            if (col.getScale() != null) {
                typeStr += "," + col.getScale().toString();
            }
            typeStr += ")";
        }
        return typeStr;
    default:
        if (col.getMaxLength() != null) {
            return String.format("%s(%s)", dataType.toString(), col.getMaxLength());
        }
        return dataType.toString();
    }
}
 
Example #2
Source File: PhoenixRuntime.java    From phoenix with Apache License 2.0
/**
 * Gets an expression that may be used to evaluate the tenant ID of a given row in a
 * multi-tenant table. Both the SYSTEM.CATALOG table and the SYSTEM.SEQUENCE
 * table are considered multi-tenant.
 * @param conn open Phoenix connection
 * @param fullTableName full table name
 * @return an expression that may be evaluated for a row in the provided table, or
 * null if the table is not a multi-tenant table.
 * @throws SQLException if the table is not found (a TableNotFoundException is thrown),
 * or if a multi-tenant local index is supplied (a SQLFeatureNotSupportedException is thrown).
 */
public static Expression getTenantIdExpression(Connection conn, String fullTableName) throws SQLException {
    PTable table = getTable(conn, fullTableName);
    // TODO: consider setting MULTI_TENANT = true for SYSTEM.CATALOG and SYSTEM.SEQUENCE
    if (!SchemaUtil.isMetaTable(table) && !SchemaUtil.isSequenceTable(table) && !table.isMultiTenant()) {
        return null;
    }
    if (table.getIndexType() == IndexType.LOCAL) {
        /*
         * With some hackery, we could deduce the tenant ID from a multi-tenant local index,
         * however it's not clear that we'd want to maintain the same prefixing of the region
         * start key, as the region boundaries may end up being different on a cluster being
         * replicated/backed-up to (which is the use case driving the method).
         */
        throw new SQLFeatureNotSupportedException();
    }
    
    int pkPosition = table.getBucketNum() == null ? 0 : 1;
    List<PColumn> pkColumns = table.getPKColumns();
    return new RowKeyColumnExpression(pkColumns.get(pkPosition), new RowKeyValueAccessor(pkColumns, pkPosition));
}
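
A hedged usage sketch for the method above: the JDBC URL and table name are hypothetical, and evaluating the returned expression against a row (via Expression.evaluate on a Tuple implementation such as org.apache.phoenix.schema.tuple.ResultTuple) is only outlined in a comment.

import java.sql.Connection;
import java.sql.DriverManager;

import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.phoenix.expression.Expression;
import org.apache.phoenix.util.PhoenixRuntime;

public class TenantIdExpressionExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical connection URL and table name.
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
            Expression tenantIdExpr =
                    PhoenixRuntime.getTenantIdExpression(conn, "MY_SCHEMA.MY_MULTI_TENANT_TABLE");
            if (tenantIdExpr == null) {
                System.out.println("Not a multi-tenant table");
                return;
            }
            ImmutableBytesWritable ptr = new ImmutableBytesWritable();
            // For a row read from HBase, wrap the Result in a Tuple implementation and call:
            // tenantIdExpr.evaluate(tuple, ptr); // ptr then points at the tenant ID bytes
        }
    }
}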
 
Example #3
Source File: PhoenixRuntime.java    From phoenix with Apache License 2.0
private static List<PColumn> getPkColumns(PTable ptable, Connection conn) throws SQLException {
    PhoenixConnection pConn = conn.unwrap(PhoenixConnection.class);
    List<PColumn> pkColumns = ptable.getPKColumns();
    
    // Skip the salting column and the view index id column if present.
    // Skip the tenant id column too if the connection is tenant specific and the table used by the query plan is multi-tenant
    int offset = (ptable.getBucketNum() == null ? 0 : 1) + (ptable.isMultiTenant() && pConn.getTenantId() != null ? 1 : 0) + (ptable.getViewIndexId() == null ? 0 : 1);
    
    // get a sublist of pkColumns by skipping the offset columns.
    pkColumns = pkColumns.subList(offset, pkColumns.size());
    
    if (ptable.getType() == PTableType.INDEX) {
        // index tables have the same schema name as their parent/data tables.
        String fullDataTableName = ptable.getParentName().getString();
        
        // Get the corresponding columns of the data table.
        List<PColumn> dataColumns = IndexUtil.getDataColumns(fullDataTableName, pkColumns, pConn);
        pkColumns = dataColumns;
    }
    return pkColumns;
}
 
Example #4
Source File: ColumnExpressionTest.java    From phoenix with Apache License 2.0
@Test
public void testSerialization() throws Exception {
    int maxLen = 30;
    int scale = 5;
    PColumn column = new PColumnImpl(PNameFactory.newName("c1"), PNameFactory.newName("f1"), PDecimal.INSTANCE, maxLen, scale,
            true, 20, SortOrder.getDefault(), 0, null, false, null);
    ColumnExpression colExp = new KeyValueColumnExpression(column);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dOut = new DataOutputStream(baos);
    colExp.write(dOut);
    dOut.flush();

    ColumnExpression colExp2 = new KeyValueColumnExpression();
    byte[] bytes = baos.toByteArray();
    DataInputStream dIn = new DataInputStream(new ByteArrayInputStream(bytes, 0, bytes.length));
    colExp2.readFields(dIn);
    assertEquals(maxLen, colExp2.getMaxLength().intValue());
    assertEquals(scale, colExp2.getScale().intValue());
    assertEquals(PDecimal.INSTANCE, colExp2.getDataType());
}
 
Example #5
Source File: PhoenixRuntime.java    From phoenix with Apache License 2.0
/**
 * Encodes the values of the given columns into a single byte array using the table's
 * key value schema.
 * @param conn connection that was used for reading/generating value.
 * @param fullTableName fully qualified table name
 * @param values values of the columns
 * @param columns list of column (family, name) pairs. The column family is optional and hence nullable.
 * Columns in the list have to be in the same order as the order of occurrence of their values in the
 * object array.
 * @return values encoded in a byte array
 * @throws SQLException
 * @see {@link #decodeValues(Connection, String, byte[], List)}
 */
public static byte[] encodeColumnValues(Connection conn, String fullTableName, Object[] values, List<Pair<String, String>> columns) throws SQLException {
    PTable table = getTable(conn, fullTableName);
    List<PColumn> pColumns = getColumns(table, columns);
    List<Expression> expressions = new ArrayList<Expression>(pColumns.size());
    int i = 0;
    for (PColumn col : pColumns) {
        Object value = values[i];
        // for purposes of encoding, sort order of the columns doesn't matter.
        Expression expr = LiteralExpression.newConstant(value, col.getDataType(), col.getMaxLength(), col.getScale());
        expressions.add(expr);
        i++;
    }
    KeyValueSchema kvSchema = buildKeyValueSchema(pColumns);
    ImmutableBytesWritable ptr = new ImmutableBytesWritable();
    ValueBitSet valueSet = ValueBitSet.newInstance(kvSchema);
    return kvSchema.toBytes(expressions.toArray(new Expression[0]), valueSet, ptr);
}
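
A hedged round-trip sketch pairing the method above with its decoding counterpart: the connection URL, table name, columns, and column types are hypothetical, and PhoenixRuntime.decodeColumnValues is assumed to be available (older releases expose the deprecated decodeValues referenced in the javadoc above with the same argument list).

import java.sql.Connection;
import java.sql.DriverManager;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.util.Pair;
import org.apache.phoenix.util.PhoenixRuntime;

public class EncodeDecodeColumnValuesExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical URL and table; assumes ID is a BIGINT PK and CF.NAME is a VARCHAR.
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
            List<Pair<String, String>> columns = new ArrayList<Pair<String, String>>();
            columns.add(new Pair<String, String>(null, "ID"));   // PK column, no column family
            columns.add(new Pair<String, String>("CF", "NAME")); // column family + column name
            Object[] values = new Object[] { 1L, "example" };    // same order as the columns list

            byte[] encoded = PhoenixRuntime.encodeColumnValues(conn, "MY_TABLE", values, columns);
            Object[] decoded = PhoenixRuntime.decodeColumnValues(conn, "MY_TABLE", encoded, columns);
        }
    }
}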
 
Example #6
Source File: PhoenixRuntime.java    From phoenix with Apache License 2.0
@Deprecated
private static List<PColumn> getPkColumns(PTable ptable, Connection conn, boolean forDataTable) throws SQLException {
    PhoenixConnection pConn = conn.unwrap(PhoenixConnection.class);
    List<PColumn> pkColumns = ptable.getPKColumns();
    
    // Skip the salting column and the view index id column if present.
    // Skip the tenant id column too if the connection is tenant specific and the table used by the query plan is multi-tenant
    int offset = (ptable.getBucketNum() == null ? 0 : 1) + (ptable.isMultiTenant() && pConn.getTenantId() != null ? 1 : 0) + (ptable.getViewIndexId() == null ? 0 : 1);
    
    // get a sublist of pkColumns by skipping the offset columns.
    pkColumns = pkColumns.subList(offset, pkColumns.size());
    
    if (ptable.getType() == PTableType.INDEX && forDataTable) {
        // index tables have the same schema name as their parent/data tables.
        String fullDataTableName = ptable.getParentName().getString();
        
        // Get the corresponding columns of the data table.
        List<PColumn> dataColumns = IndexUtil.getDataColumns(fullDataTableName, pkColumns, pConn);
        pkColumns = dataColumns;
    }
    return pkColumns;
}
 
Example #7
Source File: PhoenixRuntime.java    From phoenix with Apache License 2.0
/**
 * @param columns - Initialized empty list to be filled with the pairs of column family name and column name for columns that are used 
 * as row key for the query plan. Column family names are optional and hence the first part of the pair is nullable.
 * Column names and family names are enclosed in double quotes to allow for case sensitivity and for presence of 
 * special characters. The salting column and view index id column are not included. If the connection is tenant specific
 * and the table used by the query plan is multi-tenant, then the tenant id column is not included either.
 * @param dataTypes - Initialized empty list to be filled with the corresponding data type for the columns in @param columns.
 * @param plan - query plan to get info for
 * @param conn - phoenix connection used to generate the query plan. Caller should take care of closing the connection appropriately.
 * @param forDataTable - if true, then column names and data types correspond to the data table even if the query plan uses
 * the secondary index table. If false, and if the query plan uses the secondary index table, then the column names and data 
 * types correspond to the index table.
 * @throws SQLException
 */
@Deprecated
public static void getPkColsDataTypesForSql(List<Pair<String, String>> columns, List<String> dataTypes, QueryPlan plan, Connection conn, boolean forDataTable) throws SQLException {
    checkNotNull(columns);
    checkNotNull(dataTypes);
    checkNotNull(plan);
    checkNotNull(conn);
    List<PColumn> pkColumns = getPkColumns(plan.getTableRef().getTable(), conn, forDataTable);
    String columnName;
    String familyName;
    for (PColumn pCol : pkColumns) {
        String sqlTypeName = getSqlTypeName(pCol);
        dataTypes.add(sqlTypeName);
        columnName = addQuotes(pCol.getName().getString());
        familyName = pCol.getFamilyName() != null ? addQuotes(pCol.getFamilyName().getString()) : null;
        columns.add(new Pair<String, String>(familyName, columnName));
    }
}
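
A hedged usage sketch for the deprecated method above: the connection URL and query are hypothetical, and PhoenixRuntime.getOptimizedQueryPlan(PreparedStatement) is assumed to be available for obtaining the QueryPlan.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hbase.util.Pair;
import org.apache.phoenix.compile.QueryPlan;
import org.apache.phoenix.util.PhoenixRuntime;

public class PkColsDataTypesExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical connection URL and query.
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost");
             PreparedStatement stmt = conn.prepareStatement("SELECT * FROM MY_TABLE")) {
            QueryPlan plan = PhoenixRuntime.getOptimizedQueryPlan(stmt);

            List<Pair<String, String>> pkCols = new ArrayList<Pair<String, String>>(); // (family, name) pairs
            List<String> dataTypes = new ArrayList<String>();                          // matching SQL type names
            // forDataTable = true: report data table column names even if an index is used by the plan.
            PhoenixRuntime.getPkColsDataTypesForSql(pkCols, dataTypes, plan, conn, true);

            for (int i = 0; i < pkCols.size(); i++) {
                System.out.println(pkCols.get(i).getSecond() + " : " + dataTypes.get(i));
            }
        }
    }
}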
 
Example #8
Source File: UpsertCompiler.java    From phoenix with Apache License 2.0
public UpsertValuesMutationPlan(StatementContext context, TableRef tableRef, int nodeIndexOffset,
                                List<Expression> constantExpressions, List<PColumn> allColumns,
                                int[] columnIndexes, Set<PColumn> overlapViewColumns, byte[][] values,
                                Set<PColumn> addViewColumns, PhoenixConnection connection,
                                int[] pkSlotIndexes, boolean useServerTimestamp, byte[] onDupKeyBytes,
                                int maxSize, int maxSizeBytes) {
    this.context = context;
    this.tableRef = tableRef;
    this.nodeIndexOffset = nodeIndexOffset;
    this.constantExpressions = constantExpressions;
    this.allColumns = allColumns;
    this.columnIndexes = columnIndexes;
    this.overlapViewColumns = overlapViewColumns;
    this.values = values;
    this.addViewColumns = addViewColumns;
    this.connection = connection;
    this.pkSlotIndexes = pkSlotIndexes;
    this.useServerTimestamp = useServerTimestamp;
    this.onDupKeyBytes = onDupKeyBytes;
    this.maxSize = maxSize;
    this.maxSizeBytes = maxSizeBytes;
}
 
Example #9
Source File: ColumnExpressionTest.java    From phoenix with Apache License 2.0
@Test
public void testSerializationWithNullScaleAndMaxLength() throws Exception {
    PColumn column = new PColumnImpl(PNameFactory.newName("c1"), PNameFactory.newName("f1"), PDecimal.INSTANCE, null, null, true,
            20, SortOrder.getDefault(), 0, null, false, null);
    ColumnExpression colExp = new KeyValueColumnExpression(column);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dOut = new DataOutputStream(baos);
    colExp.write(dOut);
    dOut.flush();

    ColumnExpression colExp2 = new KeyValueColumnExpression();
    byte[] bytes = baos.toByteArray();
    DataInputStream dIn = new DataInputStream(new ByteArrayInputStream(bytes, 0, bytes.length));
    colExp2.readFields(dIn);
    assertNull(colExp2.getMaxLength());
    assertNull(colExp2.getScale());
}
 
Example #10
Source File: SchemaUtil.java    From phoenix with Apache License 2.0
public static byte[][] processSplits(byte[][] splits, LinkedHashSet<PColumn> pkColumns, Integer saltBucketNum, boolean defaultRowKeyOrder) throws SQLException {
    // FIXME: shouldn't this return if splits.length == 0?
    if (splits == null) return null;
    // We do not accept user specified splits if the table is salted and we specify defaultRowKeyOrder. In this case,
    // throw an exception.
    if (splits.length > 0 && saltBucketNum != null && defaultRowKeyOrder) {
        throw new SQLExceptionInfo.Builder(SQLExceptionCode.NO_SPLITS_ON_SALTED_TABLE).build().buildException();
    }
    // If the splits are not specified and table is salted, pre-split the table. 
    if (splits.length == 0 && saltBucketNum != null) {
        splits = SaltingUtil.getSalteByteSplitPoints(saltBucketNum);
    }
    byte[][] newSplits = new byte[splits.length][];
    for (int i=0; i<splits.length; i++) {
        newSplits[i] = processSplit(splits[i], pkColumns); 
    }
    return newSplits;
}
 
Example #11
Source File: WhereCompilerTest.java    From phoenix with Apache License 2.0
@Test
public void testAndPKAndNotPK() throws SQLException {
    String query = "select * from bugTable where ID = 'i2' and company = 'c3'";
    PhoenixConnection pconn = DriverManager.getConnection(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES)).unwrap(PhoenixConnection.class);
    pconn.createStatement().execute("create table bugTable(ID varchar primary key,company varchar)");
    PhoenixPreparedStatement pstmt = newPreparedStatement(pconn, query);
    QueryPlan plan = pstmt.optimizeQuery();
    Scan scan = plan.getContext().getScan();
    Filter filter = scan.getFilter();
    PColumn column = plan.getTableRef().getTable().getColumnForColumnName("COMPANY");
    assertEquals(
            singleKVFilter(constantComparison(
                CompareOp.EQUAL,
                new KeyValueColumnExpression(column),
                "c3")),
            filter);
}
 
Example #12
Source File: PostDDLCompiler.java    From phoenix with Apache License 2.0
public PostDDLMutationPlan(StatementContext context, List<TableRef> tableRefs, long timestamp, byte[] emptyCF, List<PColumn> deleteList, List<byte[]> projectCFs) {
    super(context, Operation.UPSERT);
    this.context = context;
    this.tableRefs = tableRefs;
    this.timestamp = timestamp;
    this.emptyCF = emptyCF;
    this.deleteList = deleteList;
    this.projectCFs = projectCFs;
}
 
Example #13
Source File: SchemaUtil.java    From phoenix with Apache License 2.0
/**
 * Estimate the max key length in bytes of the PK for a given table
 * @param table the table
 * @return the max PK length
 */
public static int estimateKeyLength(PTable table) {
    int maxKeyLength = 0;
    // Calculate the max length of a key; variable-width parts are estimated with VAR_LENGTH_ESTIMATE
    int i = 0;
    List<PColumn> columns = table.getPKColumns();
    while (i < columns.size()) {
        PColumn keyColumn = columns.get(i++);
        PDataType type = keyColumn.getDataType();
        Integer maxLength = keyColumn.getMaxLength();
        maxKeyLength += !type.isFixedWidth() ? VAR_LENGTH_ESTIMATE : maxLength == null ? type.getByteSize() : maxLength;
    }
    return maxKeyLength;
}
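
A hedged usage sketch: the connection URL and table name are hypothetical, and PhoenixRuntime.getTable(Connection, String) is assumed available for obtaining the PTable.

import java.sql.Connection;
import java.sql.DriverManager;

import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.SchemaUtil;

public class EstimateKeyLengthExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
            PTable table = PhoenixRuntime.getTable(conn, "MY_TABLE");
            int estimatedMaxKeyLength = SchemaUtil.estimateKeyLength(table);
            System.out.println("Estimated max row key length: " + estimatedMaxKeyLength + " bytes");
        }
    }
}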
 
Example #14
Source File: PhoenixRuntime.java    From phoenix with Apache License 2.0
private static KeyValueSchema buildKeyValueSchema(List<PColumn> columns) {
    KeyValueSchemaBuilder builder = new KeyValueSchemaBuilder(getMinNullableIndex(columns));
    for (PColumn col : columns) {
        builder.addField(col);
    }
    return builder.build();
}
 
Example #15
Source File: ScanRegionObserver.java    From phoenix with Apache License 2.0
/**
 * We store the metadata for each dynamic cell in a separate cell in the same column family.
 * The column qualifier for this cell is:
 * {@link ScanRegionObserver#DYN_COLS_METADATA_CELL_QUALIFIER} concatenated with the
 * qualifier of the actual dynamic column
 * @param dynColProto Protobuf representation of the dynamic column PColumn
 * @return Final qualifier for the metadata cell
 * @throws IOException If an I/O error occurs when parsing the byte array output stream
 */
private static byte[] getQualifierForDynamicColumnMetaDataCell(PTableProtos.PColumn dynColProto)
        throws IOException {
    PColumn dynCol = PColumnImpl.createFromProto(dynColProto);
    ByteArrayOutputStream qual = new ByteArrayOutputStream();
    qual.write(DYN_COLS_METADATA_CELL_QUALIFIER);
    qual.write(dynCol.getColumnQualifierBytes());
    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("Storing shadow cell for dynamic column metadata for dynamic column : " +
                dynCol.getFamilyName().getString() + "." + dynCol.getName().getString());
    }
    return qual.toByteArray();
}
 
Example #16
Source File: QueryCompilerTest.java    From phoenix with Apache License 2.0
@Test
public void testSameColumnNameInPKAndNonPK() throws Exception {
    Connection conn = DriverManager.getConnection(getUrl());
    try {
        String query = "CREATE TABLE t1 (k integer not null primary key, a.k decimal, b.k decimal)";
        conn.createStatement().execute(query);
        PhoenixConnection pconn = conn.unwrap(PhoenixConnection.class);
        PColumn c = pconn.getMetaDataCache().getTable(new PTableKey(pconn.getTenantId(), "T1")).getColumn("K");
        assertTrue(SchemaUtil.isPKColumn(c));
    } finally {
        conn.close();
    }
}
 
Example #17
Source File: SchemaUtil.java    From phoenix with Apache License 2.0
/**
 * 
 * @return a fully qualified column name in the format: "CFNAME"."COLNAME" or "COLNAME" depending on whether or not
 * there is a column family name present. 
 */
public static String getQuotedFullColumnName(PColumn pCol) {
    checkNotNull(pCol);
    String columnName = pCol.getName().getString();
    String columnFamilyName = pCol.getFamilyName() != null ? pCol.getFamilyName().getString() : null;
    return getQuotedFullColumnName(columnFamilyName, columnName);
}
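
A hedged usage sketch that prints the quoted full name of every column of a table: the connection URL and table name are hypothetical, and the PTable is obtained via PhoenixRuntime.getTable(Connection, String), which is assumed available.

import java.sql.Connection;
import java.sql.DriverManager;

import org.apache.phoenix.schema.PColumn;
import org.apache.phoenix.schema.PTable;
import org.apache.phoenix.util.PhoenixRuntime;
import org.apache.phoenix.util.SchemaUtil;

public class QuotedColumnNameExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:phoenix:localhost")) {
            PTable table = PhoenixRuntime.getTable(conn, "MY_TABLE");
            for (PColumn col : table.getColumns()) {
                // Prints "CFNAME"."COLNAME" for key value columns and "COLNAME" for PK columns.
                System.out.println(SchemaUtil.getQuotedFullColumnName(col));
            }
        }
    }
}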
 
Example #18
Source File: IndexUtil.java    From phoenix with Apache License 2.0
public static boolean getViewConstantValue(PColumn column, ImmutableBytesWritable ptr) {
    byte[] value = column.getViewConstant();
    if (value != null) {
        ptr.set(value, 0, value.length-1);
        return true;
    }
    return false;
}
 
Example #19
Source File: FromCompiler.java    From phoenix with Apache License 2.0
@Override
public ColumnRef resolveColumn(String schemaName, String tableName, String colName) throws SQLException {
    TableRef tableRef = tableRefs.get(0);
    boolean resolveCF = false;
    if (schemaName != null || tableName != null) {
        String resolvedTableName = tableRef.getTable().getTableName().getString();
        String resolvedSchemaName = tableRef.getTable().getSchemaName().getString();
        if (schemaName != null && tableName != null) {
            if (!(schemaName.equals(resolvedSchemaName) && tableName.equals(resolvedTableName))) {
                if (!(resolveCF = schemaName.equals(alias))) {
                    throw new ColumnNotFoundException(schemaName, tableName, null, colName);
                }
            }
        } else { // schemaName == null && tableName != null
            if (tableName != null && !tableName.equals(alias)
                    && (!tableName.equals(resolvedTableName) || !resolvedSchemaName.equals(""))) {
                resolveCF = true;
            }
        }
    }
    PColumn column = resolveCF
            ? tableRef.getTable().getColumnFamily(tableName).getPColumnForColumnName(colName)
            : tableRef.getTable().getColumnForColumnName(colName);
    return new ColumnRef(tableRef, column.getPosition());
}
 
Example #20
Source File: TupleProjector.java    From phoenix with Apache License 2.0
public TupleProjector(ProjectedPTableWrapper projected) {
    List<PColumn> columns = projected.getTable().getColumns();
    expressions = new Expression[columns.size() - projected.getTable().getPKColumns().size()];
    // we do not count minNullableIndex because we might do a later merge.
    KeyValueSchemaBuilder builder = new KeyValueSchemaBuilder(0);
    int i = 0;
    for (PColumn column : projected.getTable().getColumns()) {
        if (!SchemaUtil.isPKColumn(column)) {
            builder.addField(column);
            expressions[i++] = projected.getSourceExpression(column);
        }
    }
    schema = builder.build();
    valueSet = ValueBitSet.newInstance(schema);
}
 
Example #21
Source File: UpsertCompiler.java    From phoenix with Apache License 2.0
private static LiteralParseNode getNodeForRowTimestampColumn(PColumn col) {
    PDataType type = col.getDataType();
    long dummyValue = 0L;
    if (type.isCoercibleTo(PTimestamp.INSTANCE)) {
        return new LiteralParseNode(new Timestamp(dummyValue), PTimestamp.INSTANCE);
    } else if (type == PLong.INSTANCE || type == PUnsignedLong.INSTANCE) {
        return new LiteralParseNode(dummyValue, PLong.INSTANCE);
    }
    throw new IllegalArgumentException();
}
 
Example #22
Source File: PhoenixRuntime.java    From phoenix with Apache License 2.0
/**
 * @param table table to get the {@code PColumn}s for
 * @param columns list of column (family, name) pairs. The column family is optional and hence nullable.
 * @return list of {@code PColumn}s corresponding to the given columns
 * @throws SQLException
 */
@Deprecated
private static List<PColumn> getPColumns(PTable table, List<Pair<String, String>> columns) throws SQLException {
    List<PColumn> pColumns = new ArrayList<PColumn>(columns.size());
    for (Pair<String, String> column : columns) {
        pColumns.add(getPColumn(table, column.getFirst(), column.getSecond()));
    }
    return pColumns;
}
 
Example #23
Source File: ColumnParseNode.java    From phoenix with Apache License 2.0
@Override
public void toSQL(ColumnResolver resolver, StringBuilder buf) {
    // If resolver is not null, then resolve to get fully qualified name
    String tableName = null;
    if (resolver == null) {
        if (this.tableName != null) {
            tableName = this.tableName.getTableName();
        }
    } else {
        try {
            ColumnRef ref = resolver.resolveColumn(this.getSchemaName(), this.getTableName(), this.getName());
            PColumn column = ref.getColumn();
            if (!SchemaUtil.isPKColumn(column)) {
                PTable table = ref.getTable();
                String defaultFamilyName = table.getDefaultFamilyName() == null ? QueryConstants.DEFAULT_COLUMN_FAMILY : table.getDefaultFamilyName().getString();
                // Translate to the data table column name
                String dataFamilyName = column.getFamilyName().getString();
                tableName = defaultFamilyName.equals(dataFamilyName) ? null : dataFamilyName;
            }
            
        } catch (SQLException e) {
            throw new RuntimeException(e); // Already resolved, so not possible
        }
    }
    if (tableName != null) {
        if (isTableNameCaseSensitive()) {
            buf.append('"');
            buf.append(tableName);
            buf.append('"');
        } else {
            buf.append(tableName);
        }
        buf.append('.');
    }
    toSQL(buf);
}
 
Example #24
Source File: IndexExpressionParseNodeRewriter.java    From phoenix with Apache License 2.0
public IndexExpressionParseNodeRewriter(PTable index, PhoenixConnection connection) throws SQLException {
    indexedParseNodeToColumnParseNodeMap = Maps.newHashMapWithExpectedSize(index.getColumns().size());
    NamedTableNode tableNode = NamedTableNode.create(null,
            TableName.create(index.getParentSchemaName().getString(), index.getParentTableName().getString()),
            Collections.<ColumnDef> emptyList());
    ColumnResolver dataResolver = FromCompiler.getResolver(tableNode, connection);
    StatementContext context = new StatementContext(new PhoenixStatement(connection), dataResolver);
    IndexStatementRewriter rewriter = new IndexStatementRewriter(dataResolver, null);
    ExpressionCompiler expressionCompiler = new ExpressionCompiler(context);
    ColumnParseNodeVisitor columnParseNodeVisitor = new ColumnParseNodeVisitor();
    int indexPosOffset = (index.getBucketNum() == null ? 0 : 1) + (index.isMultiTenant() ? 1 : 0) + (index.getViewIndexId() == null ? 0 : 1);
    List<PColumn> pkColumns = index.getPKColumns();
    for (int i = indexPosOffset; i < pkColumns.size(); ++i) {
        PColumn column = pkColumns.get(i);
        String expressionStr = IndexUtil.getIndexColumnExpressionStr(column);
        ParseNode expressionParseNode = SQLParser.parseCondition(expressionStr);
        columnParseNodeVisitor.reset();
        expressionParseNode.accept(columnParseNodeVisitor);
        String colName = column.getName().getString();
        if (columnParseNodeVisitor.isParseNodeCaseSensitive()) {
            // force column name to be case sensitive by surrounding it with double quotes
            colName = "\"" + colName + "\"";
        }

        Expression dataExpression = expressionParseNode.accept(expressionCompiler);
        PDataType expressionDataType = dataExpression.getDataType();
        ParseNode indexedParseNode = expressionParseNode.accept(rewriter);
        PDataType indexColType = IndexUtil.getIndexColumnDataType(dataExpression.isNullable(), expressionDataType);
        ParseNode columnParseNode = new ColumnParseNode(null, colName, null);
        if (indexColType != expressionDataType) {
            columnParseNode = NODE_FACTORY.cast(columnParseNode, expressionDataType, null, null);
        }
        indexedParseNodeToColumnParseNodeMap.put(indexedParseNode, columnParseNode);
    }
}
 
Example #25
Source File: HashJoinInfo.java    From phoenix with Apache License 2.0
private static KeyValueSchema buildSchema(PTable table) {
    KeyValueSchemaBuilder builder = new KeyValueSchemaBuilder(0);
    if (table != null) {
        for (PColumn column : table.getColumns()) {
            if (!SchemaUtil.isPKColumn(column)) {
                builder.addField(column);
            }
        }
    }
    return builder.build();
}
 
Example #26
Source File: PhoenixConnection.java    From phoenix with Apache License 2.0
@Override
public PMetaData removeColumn(PName tenantId, String tableName, List<PColumn> columnsToRemove, long tableTimeStamp,
        long tableSeqNum) throws SQLException {
    metaData = metaData.removeColumn(tenantId, tableName, columnsToRemove, tableTimeStamp, tableSeqNum);
    //Cascade through to connectionQueryServices too
    getQueryServices().removeColumn(tenantId, tableName, columnsToRemove, tableTimeStamp, tableSeqNum);
    return metaData;
}
 
Example #27
Source File: PhoenixConnection.java    From phoenix with Apache License 2.0
@Override
public PMetaData addColumn(PName tenantId, String tableName, List<PColumn> columns, long tableTimeStamp,
        long tableSeqNum, boolean isImmutableRows, boolean isWalDisabled, boolean isMultitenant,
        boolean storeNulls) throws SQLException {
    metaData = metaData.addColumn(tenantId, tableName, columns, tableTimeStamp, tableSeqNum, isImmutableRows, isWalDisabled, isMultitenant, storeNulls);
    //Cascade through to connectionQueryServices too
    getQueryServices().addColumn(tenantId, tableName, columns, tableTimeStamp, tableSeqNum, isImmutableRows, isWalDisabled, isMultitenant, storeNulls);
    return metaData;
}
 
Example #28
Source File: IndexTestUtil.java    From phoenix with Apache License 2.0
private static void coerceDataValueToIndexValue(PColumn dataColumn, PColumn indexColumn, ImmutableBytesWritable ptr) {
    PDataType dataType = dataColumn.getDataType();
    // TODO: push to RowKeySchema? 
    SortOrder dataModifier = dataColumn.getSortOrder();
    PDataType indexType = indexColumn.getDataType();
    SortOrder indexModifier = indexColumn.getSortOrder();
    // We know ordinal position will match pk position, because you cannot
    // alter an index table.
    indexType.coerceBytes(ptr, dataType, dataModifier, indexModifier);
}
 
Example #29
Source File: JoinCompiler.java    From phoenix with Apache License 2.0
private void addProjectedColumn(List<PColumn> projectedColumns, List<Expression> sourceExpressions,
        ListMultimap<String, String> columnNameMap, PColumn sourceColumn, PName familyName, boolean hasSaltingColumn,
        boolean isLocalIndexColumnRef, StatementContext context)
throws SQLException {
    if (sourceColumn == SALTING_COLUMN)
        return;

    int position = projectedColumns.size() + (hasSaltingColumn ? 1 : 0);
    PTable table = tableRef.getTable();
    String schemaName = table.getSchemaName().getString();
    String tableName = table.getTableName().getString();
    String colName = isLocalIndexColumnRef ? IndexUtil.getIndexColumnName(sourceColumn) : sourceColumn.getName().getString();
    String fullName = getProjectedColumnName(schemaName, tableName, colName);
    String aliasedName = tableRef.getTableAlias() == null ? fullName : getProjectedColumnName(null, tableRef.getTableAlias(), colName);

    columnNameMap.put(colName, aliasedName);
    if (!fullName.equals(aliasedName)) {
        columnNameMap.put(fullName, aliasedName);
    }

    PName name = PNameFactory.newName(aliasedName);
    PColumnImpl column = new PColumnImpl(name, familyName, sourceColumn.getDataType(),
            sourceColumn.getMaxLength(), sourceColumn.getScale(), sourceColumn.isNullable(),
            position, sourceColumn.getSortOrder(), sourceColumn.getArraySize(), sourceColumn.getViewConstant(), sourceColumn.isViewReferenced(), sourceColumn.getExpressionStr());
    Expression sourceExpression = isLocalIndexColumnRef ?
              NODE_FACTORY.column(TableName.create(schemaName, tableName), "\"" + colName + "\"", null).accept(new ExpressionCompiler(context))
            : new ColumnRef(tableRef, sourceColumn.getPosition()).newColumnExpression();
    projectedColumns.add(column);
    sourceExpressions.add(sourceExpression);
}