Java Code Examples for org.apache.kylin.common.util.HadoopUtil#newHBaseConfiguration()
The following examples show how to use
org.apache.kylin.common.util.HadoopUtil#newHBaseConfiguration().
You can vote up the examples you like or vote down the ones you don't like,
and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: HBaseConnection.java From Kylin with Apache License 2.0 | 6 votes |
public static HConnection get(String url) { // find configuration Configuration conf = ConfigCache.get(url); if (conf == null) { conf = HadoopUtil.newHBaseConfiguration(url); ConfigCache.put(url, conf); } HConnection connection = ConnPool.get(url); try { // I don't use DCL since recreate a connection is not a big issue. if (connection == null) { connection = HConnectionManager.createConnection(conf); ConnPool.put(url, connection); } } catch (Throwable t) { throw new StorageException("Error when open connection " + url, t); } return connection; }
Example 2
Source File: DeployCoprocessorCLI.java From Kylin with Apache License 2.0 | 5 votes |
public static void main(String[] args) throws IOException { KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv(); Configuration hconf = HadoopUtil.newHBaseConfiguration(kylinConfig.getStorageUrl()); FileSystem fileSystem = FileSystem.get(hconf); HBaseAdmin hbaseAdmin = new HBaseAdmin(hconf); String localCoprocessorJar = new File(args[0]).getAbsolutePath(); logger.info("Identify coprocessor jar " + localCoprocessorJar); List<String> tableNames = getHTableNames(kylinConfig); logger.info("Identify tables " + tableNames); Set<String> oldJarPaths = getCoprocessorJarPaths(hbaseAdmin, tableNames); logger.info("Old coprocessor jar: " + oldJarPaths); Path hdfsCoprocessorJar = uploadCoprocessorJar(localCoprocessorJar, fileSystem, oldJarPaths); logger.info("New coprocessor jar: " + hdfsCoprocessorJar); List<String> processedTables = resetCoprocessorOnHTables(hbaseAdmin, hdfsCoprocessorJar, tableNames); // Don't remove old jars, missing coprocessor jar will fail hbase // removeOldJars(oldJarPaths, fileSystem); hbaseAdmin.close(); logger.info("Processed " + processedTables); logger.info("Active coprocessor jar: " + hdfsCoprocessorJar); }
Example 3
Source File: InvertedIndexHBaseTest.java From Kylin with Apache License 2.0 | 5 votes |
/**
 * Test fixture setup: initializes test metadata, loads the "test_kylin_ii"
 * inverted index and its first segment, opens an HBase connection against the
 * configured storage URL, and builds the table record info for the segment.
 */
@Before
public void setup() throws Exception {
    this.createTestMetadata();

    this.ii = IIManager.getInstance(getTestConfig()).getII("test_kylin_ii");
    this.seg = ii.getFirstSegment();

    // Connect to HBase using the storage URL from the environment config.
    String storageUrl = KylinConfig.getInstanceFromEnv().getStorageUrl();
    Configuration hbaseConf = HadoopUtil.newHBaseConfiguration(storageUrl);
    this.hconn = HConnectionManager.createConnection(hbaseConf);

    this.info = new TableRecordInfo(seg);
}
Example 4
Source File: DefaultScheduler.java From Kylin with Apache License 2.0 | 4 votes |
/**
 * Derives the ZooKeeper connect string ("quorum:clientPort") from the HBase
 * configuration built for the job engine's storage URL.
 *
 * @param context job engine configuration providing the storage URL
 * @return the ZooKeeper quorum and client port joined by a colon
 */
private String getZKConnectString(JobEngineConfig context) {
    final Configuration hbaseConf = HadoopUtil.newHBaseConfiguration(context.getConfig().getStorageUrl());
    final String quorum = hbaseConf.get(HConstants.ZOOKEEPER_QUORUM);
    final String clientPort = hbaseConf.get(HConstants.ZOOKEEPER_CLIENT_PORT);
    return quorum + ":" + clientPort;
}
Example 5
Source File: PingHBaseCLI.java From Kylin with Apache License 2.0 | 4 votes |
/**
 * Connectivity smoke test: connects to the HBase cluster behind the given
 * metadata URL (obtaining a Kerberos token if security is enabled) and prints
 * up to 20 row keys from the given table.
 *
 * @param args args[0] is the metadata URL, args[1] is the HBase table name
 * @throws IOException on HBase communication failure
 */
public static void main(String[] args) throws IOException {
    String metadataUrl = args[0];
    String hbaseTable = args[1];

    System.out.println("Hello friend.");

    Configuration hconf = HadoopUtil.newHBaseConfiguration(metadataUrl);
    if (User.isHBaseSecurityEnabled(hconf)) {
        try {
            System.out.println("--------------Getting kerberos credential for user " + UserGroupInformation.getCurrentUser().getUserName());
            TokenUtil.obtainAndCacheToken(hconf, UserGroupInformation.getCurrentUser());
        } catch (InterruptedException e) {
            // Fix: restore the interrupt status instead of silently swallowing
            // it, so callers/shutdown hooks can still observe the interruption.
            Thread.currentThread().interrupt();
            System.out.println("--------------Error while getting kerberos credential for user " + UserGroupInformation.getCurrentUser().getUserName());
        }
    }

    Scan scan = new Scan();
    int limit = 20; // cap the number of rows printed

    HConnection conn = null;
    HTableInterface table = null;
    ResultScanner scanner = null;
    try {
        conn = HConnectionManager.createConnection(hconf);
        table = conn.getTable(hbaseTable);
        scanner = table.getScanner(scan);
        int count = 0;
        for (Result r : scanner) {
            byte[] rowkey = r.getRow();
            System.out.println(Bytes.toStringBinary(rowkey));
            count++;
            if (count == limit) {
                break;
            }
        }
    } finally {
        // Close in reverse order of acquisition; each guard tolerates a
        // failure partway through the try block.
        if (scanner != null) {
            scanner.close();
        }
        if (table != null) {
            table.close();
        }
        if (conn != null) {
            conn.close();
        }
    }
}