org.apache.hive.hcatalog.data.schema.HCatFieldSchema Java Examples
The following examples show how to use
org.apache.hive.hcatalog.data.schema.HCatFieldSchema.
Each example notes its original project, source file, and license.
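Before the project examples, here is a minimal, self-contained sketch of the class itself: how a couple of primitive HCatFieldSchema fields and an enclosing HCatSchema might be constructed and inspected. The field names and comments are illustrative only and do not come from any of the examples below.

import java.util.Arrays;

import org.apache.hive.hcatalog.common.HCatException;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;

public class HCatFieldSchemaSketch {
  public static void main(String[] args) throws HCatException {
    // A primitive field is described by a name, an HCatFieldSchema.Type and an optional comment.
    HCatFieldSchema idField = new HCatFieldSchema("id", HCatFieldSchema.Type.INT, "row id");
    HCatFieldSchema nameField = new HCatFieldSchema("name", HCatFieldSchema.Type.STRING, null);

    // An HCatSchema is an ordered list of field schemas.
    HCatSchema schema = new HCatSchema(Arrays.asList(idField, nameField));

    // Inspect the schema: field name, type string, and position within the schema.
    for (HCatFieldSchema field : schema.getFields()) {
      System.out.println(field.getName() + " : " + field.getTypeString()
          + " (position " + schema.getPosition(field.getName()) + ")");
    }
  }
}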
Example #1
Source File: HCatalogExportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testFloatTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "float", Types.FLOAT, HCatFieldSchema.Type.FLOAT, 0, 0,
      10.0F, 10.F, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "real", Types.FLOAT, HCatFieldSchema.Type.FLOAT, 0, 0,
      20.0F, 20.0F, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "double", Types.DOUBLE, HCatFieldSchema.Type.DOUBLE, 0, 0,
      30.0D, 30.0D, KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
Example #2
Source File: HCatalogExportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

/**
 * Test other file formats.
 */
public void testSequenceFile() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "2", "2", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col0");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("1");
  utils.setStorageInfo(HCatalogTestUtils.STORED_AS_SEQFILE);
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
Example #3
Source File: HCatalogExportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testMultipleStaticKeysAndDynamicPartitioning() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "2", "2", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "3", "3", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hcatalog-partition-keys");
  addlArgsArray.add("col0,col1");
  addlArgsArray.add("--hcatalog-partition-values");
  addlArgsArray.add("1,2");
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
Example #4
Source File: HCatalogExportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testStaticPartitioning() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col0");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("1");
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
Example #5
Source File: HCatalogExportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testStringTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.CHAR, 14, 0,
      new HiveChar("string to test", 14), "string to test", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "char(14)", Types.CHAR, HCatFieldSchema.Type.VARCHAR, 14, 0,
      new HiveVarchar("string to test", 14), "string to test", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3),
      "longvarchar", Types.LONGVARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "string to test", "string to test", KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
Example #6
Source File: HCatalogExportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testDateTypesToBigInt() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  long offset = TimeZone.getDefault().getRawOffset();
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "date", Types.DATE, HCatFieldSchema.Type.BIGINT, 0, 0,
      0 - offset, new Date(70, 0, 1), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "time", Types.TIME, HCatFieldSchema.Type.BIGINT, 0, 0,
      36672000L - offset, new Time(10, 11, 12), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "timestamp", Types.TIMESTAMP, HCatFieldSchema.Type.BIGINT, 0, 0,
      36672000L - offset, new Timestamp(70, 0, 1, 10, 11, 12, 0), KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--map-column-hive");
  addlArgsArray.add("COL0=bigint,COL1=bigint,COL2=bigint");
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
Example #7
Source File: HCatalogExportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testStaticPartitioningWithMultipleKeys() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "2", "2", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hcatalog-partition-keys");
  addlArgsArray.add("col0,col1");
  addlArgsArray.add("--hcatalog-partition-values");
  addlArgsArray.add("1,2");
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
Example #8
Source File: HCatalogTestUtils.java From aliyun-maxcompute-data-collectors with Apache License 2.0

/**
 * The record writer mapper for HCatalog tables that writes records from an in
 * memory list.
 */
public void createHCatTableUsingSchema(String dbName, String tableName,
    List<HCatFieldSchema> tableCols, List<HCatFieldSchema> partKeys)
    throws Exception {
  String databaseName = dbName == null ? SqoopHCatUtilities.DEFHCATDB : dbName;
  LOG.info("Dropping HCatalog table if it exists " + databaseName + '.' + tableName);
  String dropCmd = getHCatDropTableCmd(databaseName, tableName);
  try {
    utils.launchHCatCli(dropCmd);
  } catch (Exception e) {
    LOG.debug("Drop hcatalog table exception : " + e);
    LOG.info("Unable to drop table." + dbName + "." + tableName
        + ". Assuming it did not exist");
  }
  LOG.info("Creating HCatalog table if it exists " + databaseName + '.' + tableName);
  String createCmd = getHCatCreateTableCmd(databaseName, tableName, tableCols, partKeys);
  utils.launchHCatCli(createCmd);
  LOG.info("Created HCatalog table " + dbName + "." + tableName);
}
Example #9
Source File: HCatalogTestUtils.java From aliyun-maxcompute-data-collectors with Apache License 2.0

private List<HCatRecord> generateHCatRecords(int numRecords,
    HCatSchema hCatTblSchema, ColumnGenerator... extraCols) throws Exception {
  List<HCatRecord> records = new ArrayList<HCatRecord>();
  List<HCatFieldSchema> hCatTblCols = hCatTblSchema.getFields();
  int size = hCatTblCols.size();
  for (int i = 0; i < numRecords; ++i) {
    DefaultHCatRecord record = new DefaultHCatRecord(size);
    record.set(hCatTblCols.get(0).getName(), hCatTblSchema, i);
    record.set(hCatTblCols.get(1).getName(), hCatTblSchema, "textfield" + i);
    int idx = 0;
    for (int j = 0; j < extraCols.length; ++j) {
      if (extraCols[j].getKeyType() == KeyType.STATIC_KEY) {
        continue;
      }
      record.set(hCatTblCols.get(idx + 2).getName(), hCatTblSchema,
          extraCols[j].getHCatValue(i));
      ++idx;
    }
    records.add(record);
  }
  return records;
}
Example #10
Source File: JsonSerdeUtils.java From incubator-hivemall with Apache License 2.0

@Nonnull
private static List<Object> parseArray(@Nonnull final JsonParser p,
    @CheckForNull final List<TypeInfo> columnTypes)
    throws HCatException, IOException, SerDeException {
  Preconditions.checkNotNull(columnTypes, "columnTypes MUST NOT be null",
      SerDeException.class);
  if (columnTypes.size() != 1) {
    throw new IOException("Expected a single array but got " + columnTypes);
  }
  TypeInfo elemType = columnTypes.get(0);
  HCatSchema schema = HCatSchemaUtils.getHCatSchema(elemType);
  HCatFieldSchema listSchema = schema.get(0);
  HCatFieldSchema elemSchema = listSchema.getArrayElementSchema().get(0);
  final List<Object> arr = new ArrayList<Object>();
  while (p.nextToken() != JsonToken.END_ARRAY) {
    arr.add(extractCurrentField(p, elemSchema, true));
  }
  return arr;
}
Example #11
Source File: HCatInputFormatBase.java From flink with Apache License 2.0

/**
 * Specifies that the InputFormat returns Flink tuples instead of
 * {@link org.apache.hive.hcatalog.data.HCatRecord}.
 *
 * <p>Note: Flink tuples might only support a limited number of fields (depending on the API).
 *
 * @return This InputFormat.
 * @throws org.apache.hive.hcatalog.common.HCatException
 */
public HCatInputFormatBase<T> asFlinkTuples() throws HCatException {
  // build type information
  int numFields = outputSchema.getFields().size();
  if (numFields > this.getMaxFlinkTupleSize()) {
    throw new IllegalArgumentException("Only up to " + this.getMaxFlinkTupleSize()
        + " fields can be returned as Flink tuples.");
  }
  TypeInformation[] fieldTypes = new TypeInformation[numFields];
  fieldNames = new String[numFields];
  for (String fieldName : outputSchema.getFieldNames()) {
    HCatFieldSchema field = outputSchema.get(fieldName);
    int fieldPos = outputSchema.getPosition(fieldName);
    TypeInformation fieldType = getFieldType(field);
    fieldTypes[fieldPos] = fieldType;
    fieldNames[fieldPos] = fieldName;
  }
  this.resultType = new TupleTypeInfo(fieldTypes);
  return this;
}
Example #12
Source File: ColumnCardinalityMapper.java From Kylin with Apache License 2.0

@Override
public void map(T key, HCatRecord value, Context context)
    throws IOException, InterruptedException {
  HCatFieldSchema field;
  Object fieldValue;
  for (int m = 0; m < columnSize; m++) {
    field = schema.get(m);
    fieldValue = value.get(field.getName(), schema);
    if (fieldValue == null)
      fieldValue = "NULL";
    if (counter < 5 && m < 10) {
      System.out.println("Get row " + counter + " column '" + field.getName()
          + "' value: " + fieldValue);
    }
    if (fieldValue != null)
      getHllc(m).add(Bytes.toBytes(fieldValue.toString()));
  }
  counter++;
}
Example #13
Source File: FactDistinctColumnsMapper.java From Kylin with Apache License 2.0

@Override
public void map(KEYIN key, HCatRecord record, Context context)
    throws IOException, InterruptedException {
  try {
    int[] flatTableIndexes = intermediateTableDesc.getRowKeyColumnIndexes();
    HCatFieldSchema fieldSchema = null;
    for (int i : factDictCols) {
      outputKey.set((short) i);
      fieldSchema = schema.get(flatTableIndexes[i]);
      Object fieldValue = record.get(fieldSchema.getName(), schema);
      if (fieldValue == null)
        continue;
      byte[] bytes = Bytes.toBytes(fieldValue.toString());
      outputValue.set(bytes, 0, bytes.length);
      context.write(outputKey, outputValue);
    }
  } catch (Exception ex) {
    handleErrorRecord(record, ex);
  }
}
Example #14
Source File: IIDistinctColumnsMapper.java From Kylin with Apache License 2.0

@Override
public void map(KEYIN key, HCatRecord record, Context context)
    throws IOException, InterruptedException {
  HCatFieldSchema fieldSchema = null;
  for (short i = 0; i < columnSize; i++) {
    outputKey.set(i);
    fieldSchema = schema.get(i);
    Object fieldValue = record.get(fieldSchema.getName(), schema);
    if (fieldValue == null)
      continue;
    byte[] bytes = Bytes.toBytes(fieldValue.toString());
    outputValue.set(bytes, 0, bytes.length);
    context.write(outputKey, outputValue);
  }
}
Example #15
Source File: HCatalogExportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testTextFile() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "2", "2", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col0");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("1");
  utils.setStorageInfo(HCatalogTestUtils.STORED_AS_TEXT);
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
Example #16
Source File: HCatalogExportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testNumberTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "numeric(18,2)", Types.NUMERIC, HCatFieldSchema.Type.STRING, 0, 0,
      "1000", new BigDecimal("1000"), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "decimal(18,2)", Types.DECIMAL, HCatFieldSchema.Type.STRING, 0, 0,
      "2000", new BigDecimal("2000"), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "decimal(18,2)", Types.DECIMAL, HCatFieldSchema.Type.DECIMAL, 18, 2,
      HiveDecimal.create(new BigDecimal("2000")), new BigDecimal("2000"), KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
Example #17
Source File: HCatalogExportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testIntTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "boolean", Types.BOOLEAN, HCatFieldSchema.Type.BOOLEAN, 0, 0,
      Boolean.TRUE, Boolean.TRUE, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "tinyint", Types.INTEGER, HCatFieldSchema.Type.INT, 0, 0,
      10, 10, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "smallint", Types.INTEGER, HCatFieldSchema.Type.INT, 0, 0,
      100, 100, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(3),
      "int", Types.INTEGER, HCatFieldSchema.Type.INT, 0, 0,
      1000, 1000, KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(4),
      "bigint", Types.BIGINT, HCatFieldSchema.Type.BIGINT, 0, 0,
      10000L, 10000L, KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
Example #18
Source File: HCatalogImportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testCreateTableWithPreExistingTable() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("1", 20), "1", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("2", 20), "2", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  try {
    // Precreate table
    utils.createHCatTable(CreateMode.CREATE, TOTAL_RECORDS, table, cols);
    runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true, false);
    fail("HCatalog job with --create-hcatalog-table and pre-existing"
        + " table should fail");
  } catch (Exception e) {
    LOG.debug("Caught expected exception while running "
        + " create-hcatalog-table with pre-existing table test", e);
  }
}
Example #19
Source File: HCatalogImportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testHiveDelimsReplacement() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "^^^Test", "\u0001\n\rTest", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "^^^Test2", "\u0001\r\nTest2", KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-delims-replacement");
  addlArgsArray.add("^");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
Example #20
Source File: HCatalogImportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testHiveDropDelims() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "Test", "\u0001\n\rTest", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "Test2", "\u0001\r\nTest2", KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-drop-import-delims");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
Example #21
Source File: HCatalogImportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testTableCreationWithStorageStanza() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("1", 20), "1", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("2", 20), "2", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col1");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("2");
  addlArgsArray.add("--create-hcatalog-table");
  addlArgsArray.add("--hcatalog-storage-stanza");
  addlArgsArray.add(HCatalogTestUtils.STORED_AS_TEXT);
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true, false);
}
Example #22
Source File: HCatalogImportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testTableCreationWithMultipleStaticPartKeys() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("1", 20), "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("2", 20), "2", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hcatalog-partition-keys");
  addlArgsArray.add("col0,col1");
  addlArgsArray.add("--hcatalog-partition-values");
  addlArgsArray.add("1,2");
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true, false);
}
Example #23
Source File: HCatalogImportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testTableCreationWithPartition() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("1", 20), "1", KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.VARCHAR, 20, 0,
      new HiveVarchar("2", 20), "2", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col1");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("2");
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true, false);
}
Example #24
Source File: HCatalogImportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testTableCreation() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      new HiveVarchar("1", 20), "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      new HiveVarchar("2", 20), "2", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--create-hcatalog-table");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null, true, false);
}
Example #25
Source File: HCatalogImportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testTextFile() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "2", "2", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col0");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("1");
  setExtraArgs(addlArgsArray);
  utils.setStorageInfo(HCatalogTestUtils.STORED_AS_TEXT);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
Example #26
Source File: HCatalogImportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

/**
 * Test other file formats.
 */
public void testSequenceFile() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "2", "2", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col0");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("1");
  setExtraArgs(addlArgsArray);
  utils.setStorageInfo(HCatalogTestUtils.STORED_AS_SEQFILE);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
Example #27
Source File: HCatalogImportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testMultipleStaticKeysAndDynamicPartitioning() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "2", "2", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "3", "3", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hcatalog-partition-keys");
  addlArgsArray.add("col0,col1");
  addlArgsArray.add("--hcatalog-partition-values");
  addlArgsArray.add("1,2");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
Example #28
Source File: HCatalogImportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testStaticAndDynamicPartitioning() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "2", "2", KeyType.DYNAMIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col0");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("1");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
Example #29
Source File: HCatalogImportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testStaticPartitioningWithMultipleKeys() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "2", "2", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hcatalog-partition-keys");
  addlArgsArray.add("col0,col1");
  addlArgsArray.add("--hcatalog-partition-values");
  addlArgsArray.add("1,2");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
Example #30
Source File: HCatalogImportTest.java From aliyun-maxcompute-data-collectors with Apache License 2.0

public void testStaticPartitioning() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "varchar(20)", Types.VARCHAR, HCatFieldSchema.Type.STRING, 0, 0,
      "1", "1", KeyType.STATIC_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  addlArgsArray.add("--hive-partition-key");
  addlArgsArray.add("col0");
  addlArgsArray.add("--hive-partition-value");
  addlArgsArray.add("1");
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}