scala.collection.Iterator Java Examples

The following examples show how to use scala.collection.Iterator from Java code. Each example comes from a real open-source project; the source file, project, and license are noted above each snippet.
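
scala.collection.Iterator follows the same hasNext()/next() protocol as java.util.Iterator, but it does not implement the Java interface, so it cannot be used in a for-each loop directly. A minimal sketch of the basic pattern, assuming Scala 2.12's JavaConverters is on the classpath:

import java.util.Arrays;

import scala.collection.Iterator;
import scala.collection.JavaConverters;

public class ScalaIteratorBasics {
    public static void main(String[] args) {
        // Wrap a Java list as a Scala Buffer and ask it for a scala.collection.Iterator.
        Iterator<String> it = JavaConverters.asScalaBuffer(Arrays.asList("a", "b", "c")).iterator();
        // Drain it manually, exactly as the examples below do.
        while (it.hasNext()) {
            System.out.println(it.next());
        }
    }
}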
Example #1
Source File: CsvSourceTest.java    From kylin-on-parquet-v2 with Apache License 2.0
@Test
public void testGetSourceDataFromFactTable() {
    CubeManager cubeMgr = CubeManager.getInstance(getTestConfig());
    CubeInstance cube = cubeMgr.getCube(CUBE_NAME);
    TableDesc fact = MetadataConverter.extractFactTable(cube);
    List<ColumnDesc> colDescs = Lists.newArrayList();
    Iterator<ColumnDesc> iterator = fact.columns().iterator();
    while (iterator.hasNext()) {
        colDescs.add(iterator.next());
    }

    NSparkCubingEngine.NSparkCubingSource cubingSource = new CsvSource().adaptToBuildEngine(NSparkCubingEngine.NSparkCubingSource.class);
    Dataset<Row> cubeDS = cubingSource.getSourceData(fact, ss, Maps.newHashMap());
    cubeDS.take(10);
    StructType schema = cubeDS.schema();
    for (int i = 0; i < colDescs.size(); i++) {
        StructField field = schema.fields()[i];
        Assert.assertEquals(field.name(), colDescs.get(i).columnName());
        Assert.assertEquals(field.dataType(), colDescs.get(i).dataType());
    }
}
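
The drain-into-a-list loop above is a recurring interop chore. A hypothetical helper (not part of the Kylin code) that factors it out:

import java.util.ArrayList;
import java.util.List;

import scala.collection.Iterator;

public final class ScalaIterators {
    private ScalaIterators() {
    }

    // Copies the remaining elements of a scala.collection.Iterator into a java.util.List.
    public static <T> List<T> toJavaList(Iterator<T> iterator) {
        List<T> result = new ArrayList<>();
        while (iterator.hasNext()) {
            result.add(iterator.next());
        }
        return result;
    }
}

With it, the column collection above shrinks to List<ColumnDesc> colDescs = ScalaIterators.toJavaList(fact.columns().iterator());.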
 
Example #2
Source File: KafkaAuthBinding.java    From incubator-sentry with Apache License 2.0
public boolean removeAcls(scala.collection.immutable.Set<Acl> acls, final Resource resource) {
    verifyAcls(acls);
    LOG.info("Removing Acl: acl->" + acls + " resource->" + resource);
    final Iterator<Acl> iterator = acls.iterator();
    while (iterator.hasNext()) {
        final Acl acl = iterator.next();
        final String role = getRole(acl);
        try {
            execute(new Command<Void>() {
                @Override
                public Void run(SentryGenericServiceClient client) throws Exception {
                    client.dropPrivilege(
                            requestorName, role, toTSentryPrivilege(acl, resource));
                    return null;
                }
            });
        } catch (KafkaException kex) {
            LOG.error("Failed to remove acls.", kex);
            return false;
        }
    }

    return true;
}
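
Rather than draining the Scala Set's iterator by hand, the set can be viewed as a java.util.Set and traversed with for-each. A sketch of the pattern on plain strings, assuming Scala 2.12's JavaConverters (this project targets an older Scala and uses JavaConversions elsewhere):

import java.util.Set;

import scala.collection.JavaConverters;

public class ScalaSetIteration {
    // setAsJavaSet returns a live view; no copying takes place.
    public static void printAll(scala.collection.immutable.Set<String> items) {
        Set<String> view = JavaConverters.setAsJavaSet(items);
        for (String item : view) {
            System.out.println(item);
        }
    }
}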
 
Example #3
Source File: KafkaAuthBinding.java    From incubator-sentry with Apache License 2.0
public void addAcls(scala.collection.immutable.Set<Acl> acls, final Resource resource) {
    verifyAcls(acls);
    LOG.info("Adding Acl: acl->" + acls + " resource->" + resource);

    final Iterator<Acl> iterator = acls.iterator();
    while (iterator.hasNext()) {
        final Acl acl = iterator.next();
        final String role = getRole(acl);
        if (!roleExists(role)) {
            throw new KafkaException("Can not add Acl for non-existent Role: " + role);
        }
        execute(new Command<Void>() {
            @Override
            public Void run(SentryGenericServiceClient client) throws Exception {
                client.grantPrivilege(
                    requestorName, role, COMPONENT_NAME, toTSentryPrivilege(acl, resource));
                return null;
            }
        });
    }
}
 
Example #4
Source File: SocialNetworkStructureBuilder.java    From DataGenerator with Apache License 2.0
/**
 * Build all combinations of graph structures for generic event stubs of a maximum length
 * @param length Maximum number of nodes in each to generate
 * @return All graph combinations of specified length or less
 */
public Vector<Graph<UserStub>> generateAllNodeDataTypeGraphCombinationsOfMaxLength(int length) {
    Vector<Graph<UserStub>> graphs = super.generateAllNodeDataTypeGraphCombinationsOfMaxLength(length);

    if (WRITE_STRUCTURES_IN_PARALLEL) {
        // Left as an exercise to the student.
        throw new NotImplementedError();
    } else {
        int i = 0;
        for (Iterator<Graph<UserStub>> iter = graphs.toIterator(); iter.hasNext();) {
            Graph<UserStub> graph = iter.next();
            graph.setGraphId("S_" + ++i + "_" + graph.allNodes().size());
            graph.writeDotFile(outDir + graph.graphId() + ".gv", false, ALSO_WRITE_AS_PNG);
        }
        System.out.println("Wrote " + i + " graph files in DOT format to " + outDir + "");
    }

    return graphs;
}
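
On a strict Scala collection like Vector, toIterator() is equivalent to iterator(). Alternatively, the collection can be viewed as a java.lang.Iterable so an ordinary for-each loop works; a sketch on plain strings, assuming Scala 2.12's JavaConverters:

import scala.collection.JavaConverters;
import scala.collection.immutable.Vector;

public class VectorIteration {
    // asJavaIterable wraps the Scala collection without copying it.
    public static void printAll(Vector<String> items) {
        for (String item : JavaConverters.asJavaIterable(items)) {
            System.out.println(item);
        }
    }
}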
 
Example #5
Source File: PrepareSaveFunction.java    From deep-spark with Apache License 2.0
@Override
public BoxedUnit apply(Iterator<T> v1) {
    IExtractor<T, S> extractor;
    try {
        extractor = getExtractorInstance(config);
    } catch (DeepExtractorInitializationException e) {
        extractor = getExtractorClient();
    }

    extractor.initSave(config, first, queryBuilder);

    while (v1.hasNext()) {
        extractor.saveRDD(v1.next());
    }
    config.setPartitionId(config.getPartitionId() + 1);
    extractor.close();
    return null;
}
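
Because the Scala signature of Function1.apply returns Unit, the Java override must return scala.runtime.BoxedUnit. Returning null works here since the value is discarded, but the canonical value is the singleton:

    return BoxedUnit.UNIT; // scala.runtime.BoxedUnit, the boxed form of Scala's Unit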
 
Example #6
Source File: ServiceKafkaClient.java    From ranger with Apache License 2.0
private List<String> getTopicList(List<String> ignoreTopicList) throws Exception {
	List<String> ret = new ArrayList<String>();

	int sessionTimeout = 5000;
	int connectionTimeout = 10000;
	ZooKeeperClient zookeeperClient = new ZooKeeperClient(zookeeperConnect, sessionTimeout, connectionTimeout,
			1, Time.SYSTEM, "kafka.server", "SessionExpireListener", Option.empty());
	try (KafkaZkClient kafkaZkClient = new KafkaZkClient(zookeeperClient, true, Time.SYSTEM)) {
		Iterator<String> iter = kafkaZkClient.getAllTopicsInCluster().iterator();
		while (iter.hasNext()) {
			String topic = iter.next();
			if (ignoreTopicList == null || !ignoreTopicList.contains(topic)) {
				ret.add(topic);
			}
		}
	}
	return ret;
}
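
Assuming getAllTopicsInCluster() returns a Scala Seq<String>, as it does in the Kafka version used here, the same filtering can be written with a for-each over a converted list. A hypothetical variant of the loop body, using Scala 2.12's JavaConverters:

    // seqAsJavaList wraps the Seq as a java.util.List view.
    for (String topic : JavaConverters.seqAsJavaList(kafkaZkClient.getAllTopicsInCluster())) {
        if (ignoreTopicList == null || !ignoreTopicList.contains(topic)) {
            ret.add(topic);
        }
    }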
 
Example #7
Source File: ParserQueryFunctionsTest.java    From odata with Apache License 2.0
private void testQueryFunction(String operator) throws ODataException {
    EqExpr expr = getExprFromOperator(operator);
    MethodCallExpr call = (MethodCallExpr) expr.left();
    assertThat(call.methodName(), is(operator));
    List<Expression> args = call.args();
    Iterator iter = args.iterator();
    while (iter.hasNext()) {
        Object obj = iter.next();
        if (obj instanceof EntityPathExpr) {
            EntityPathExpr entityPathExpr = (EntityPathExpr) obj;
            PropertyPathExpr propertyPath = (PropertyPathExpr) entityPathExpr.subPath().get();
            assertThat(propertyPath.propertyName(), is("name"));
        }
    }
    LiteralExpr literal = (LiteralExpr) expr.right();
    NumberLiteral number = (NumberLiteral) literal.value();
    assertThat(number.value(), is(new BigDecimal(new java.math.BigDecimal(19))));
}
 
Example #8
Source File: ParserLogicalTest.java    From odata with Apache License 2.0
private void processQueryFunction(FilterOption option, String boolMethod) {
    BooleanMethodCallExpr methodCall = (BooleanMethodCallExpr) option.expression();
    assertThat(methodCall.methodName(), is(boolMethod));
    List<Expression> args = methodCall.args();
    Iterator iterator = args.iterator();
    while (iterator.hasNext()) {
        Object cursor = iterator.next();
        if (cursor instanceof EntityPathExpr) {
            EntityPathExpr pathExpr = (EntityPathExpr) cursor;
            PropertyPathExpr path = (PropertyPathExpr) pathExpr.subPath().get();
            assertThat(path.propertyName(), is("name"));
        } else if (cursor instanceof LiteralExpr) {
            LiteralExpr literalExpr = (LiteralExpr) cursor;
            StringLiteral stringLiteral = (StringLiteral) literalExpr.value();
            assertThat(stringLiteral.value(), is("John"));
        }
    }
}
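
In both parser tests the iterator is declared as a raw type, which is why each element surfaces as Object and must be cast after the instanceof check. Since args is a Scala List<Expression>, the element type can be carried through; a sketch of the typed loop:

    Iterator<Expression> iterator = args.iterator();
    while (iterator.hasNext()) {
        Expression arg = iterator.next(); // typed; only the subtype dispatch still needs instanceof
        if (arg instanceof EntityPathExpr) {
            // handle the path expression as above
        }
    }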
 
Example #9
Source File: OpenKoreanTextPhraseExtractor.java    From elasticsearch-analysis-openkoreantext with Apache License 2.0
private Seq<KoreanToken> convertPhrasesToTokens(Seq<KoreanPhrase> phrases) {
    KoreanToken[] tokens = new KoreanToken[phrases.length()];

    Iterator<KoreanPhrase> iterator = phrases.iterator();
    int i = 0;
    while (iterator.hasNext()) {
        KoreanPhrase phrase = iterator.next();
        tokens[i++] = new KoreanToken(phrase.text(), phrase.pos(), phrase.offset(), phrase.length(), scala.Option.apply(null), false);
    }

    Arrays.sort(tokens, (o1, o2) -> {
        if (o1.offset() == o2.offset())
            return 0;
        return o1.offset() < o2.offset() ? -1 : 1;
    });

    return JavaConverters.asScalaBuffer(Arrays.asList(tokens)).toSeq();
}
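
Since offset() returns a primitive int, the hand-written comparator above is equivalent to a one-liner (a hypothetical simplification):

    Arrays.sort(tokens, java.util.Comparator.comparingInt(KoreanToken::offset));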
 
Example #10
Source File: SparkSqlInterpreter.java    From Explorer with Apache License 2.0
public int getProgress() {
    SQLContext sqlc = getSparkInterpreter().getSQLContext();
    SparkContext sc = sqlc.sparkContext();
    JobProgressListener sparkListener = getSparkInterpreter().getJobProgressListener();
    int completedTasks = 0;
    int totalTasks = 0;

    DAGScheduler scheduler = sc.dagScheduler();
    HashSet<ActiveJob> jobs = scheduler.activeJobs();
    Iterator<ActiveJob> it = jobs.iterator();
    while (it.hasNext()) {
        ActiveJob job = it.next();
        String g = (String) job.properties().get("spark.jobGroup.id");
        if (jobGroup.equals(g)) {
            int[] progressInfo = null;
            if (sc.version().startsWith("1.0")) {
                progressInfo = getProgressFromStage_1_0x(sparkListener, job.finalStage());
            } else if (sc.version().startsWith("1.1") || sc.version().startsWith("1.2")) {
                progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
            } else {
                logger.warn("Spark {} getting progress information not supported" + sc.version());
                continue;
            }
            totalTasks += progressInfo[0];
            completedTasks += progressInfo[1];
        }
    }

    if (totalTasks == 0) {
        return 0;
    }
    return completedTasks * 100 / totalTasks;
}
 
Example #11
Source File: MLContextTest.java    From systemds with Apache License 2.0
@Test
public void testOutputRDDStringCSVDenseDML() {
	System.out.println("MLContextTest - output RDD String CSV Dense DML");

	String s = "M = matrix('1 2 3 4', rows=2, cols=2); print(toString(M));";
	Script script = dml(s).out("M");
	MLResults results = ml.execute(script);
	RDD<String> rddStringCSV = results.getRDDStringCSV("M");
	Iterator<String> iterator = rddStringCSV.toLocalIterator();
	Assert.assertEquals("1.0,2.0", iterator.next());
	Assert.assertEquals("3.0,4.0", iterator.next());
}
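
RDD.toLocalIterator() returns a scala.collection.Iterator that pulls one partition at a time to the driver. To hand it to Java APIs that expect java.util.Iterator, it can be adapted; a sketch, assuming Scala 2.12's JavaConverters:

    java.util.Iterator<String> it = JavaConverters.asJavaIterator(rddStringCSV.toLocalIterator());
    while (it.hasNext()) {
        System.out.println(it.next());
    }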
 
Example #12
Source File: MLContextTest.java    From systemds with Apache License 2.0
@Test
public void testOutputRDDStringCSVSparseDML() {
	System.out.println("MLContextTest - output RDD String CSV Sparse DML");

	String s = "M = matrix(0, rows=10, cols=10); M[1,1]=1; M[1,2]=2; M[2,1]=3; M[2,2]=4; print(toString(M));";
	Script script = dml(s).out("M");
	MLResults results = ml.execute(script);
	RDD<String> rddStringCSV = results.getRDDStringCSV("M");
	Iterator<String> iterator = rddStringCSV.toLocalIterator();
	Assert.assertEquals("1.0,2.0", iterator.next());
	Assert.assertEquals("3.0,4.0", iterator.next());
}
 
Example #13
Source File: MLContextTest.java    From systemds with Apache License 2.0
@Test
public void testOutputMatrixObjectDML() {
	System.out.println("MLContextTest - output matrix object DML");
	String s = "M = matrix('1 2 3 4', rows=2, cols=2);";
	MatrixObject mo = ml.execute(dml(s).out("M")).getMatrixObject("M");
	RDD<String> rddStringCSV = MLContextConversionUtil.matrixObjectToRDDStringCSV(mo);
	Iterator<String> iterator = rddStringCSV.toLocalIterator();
	Assert.assertEquals("1.0,2.0", iterator.next());
	Assert.assertEquals("3.0,4.0", iterator.next());
}
 
Example #14
Source File: MLContextTest.java    From systemds with Apache License 2.0
@Test
public void testOutputRDDStringCSVFromMatrixDML() {
	System.out.println("MLContextTest - output RDD String CSV from matrix DML");

	String s = "M = matrix('1 2 3 4', rows=1, cols=4);";
	Script script = dml(s).out("M");
	RDD<String> rddStringCSV = ml.execute(script).getMatrix("M").toRDDStringCSV();
	Iterator<String> iterator = rddStringCSV.toLocalIterator();
	Assert.assertEquals("1.0,2.0,3.0,4.0", iterator.next());
}
 
Example #15
Source File: DataConverter.java    From AsyncDao with MIT License
public static <T> T queryResultToObject(QueryResult queryResult, Class<T> clazz, ModelMap resultMap) {
    final Option<ResultSet> rows = queryResult.rows();
    if (rows.isDefined()) {
        List<String> columnNames = ScalaUtils.toJavaList(rows.get().columnNames().toList());
        Iterator<RowData> iterator = rows.get().iterator();
        if (iterator.hasNext()) {
            try {
                return rowDataToObject(iterator.next(), clazz, resultMap, columnNames);
            } catch (Exception e) {
                log.error("convert object error :{}", e);
            }
        }
    }
    return null;
}
 
Example #16
Source File: PythonCorrelateSplitRule.java    From flink with Apache License 2.0
private List<String> createNewFieldNames(
	RelDataType rowType,
	RexBuilder rexBuilder,
	int primitiveFieldCount,
	ArrayBuffer<RexNode> extractedRexNodes,
	List<RexNode> calcProjects) {
	for (int i = 0; i < primitiveFieldCount; i++) {
		calcProjects.add(RexInputRef.of(i, rowType));
	}
	// add the fields of the extracted rex calls.
	Iterator<RexNode> iterator = extractedRexNodes.iterator();
	while (iterator.hasNext()) {
		calcProjects.add(iterator.next());
	}

	List<String> nameList = new LinkedList<>();
	for (int i = 0; i < primitiveFieldCount; i++) {
		nameList.add(rowType.getFieldNames().get(i));
	}
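	// indices() yields a Scala Range; Scala's Int erases to Object in generic
	// positions, so from Java the Range's iterator is an Iterator<Object>.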
	Iterator<Object> indicesIterator = extractedRexNodes.indices().iterator();
	while (indicesIterator.hasNext()) {
		nameList.add("f" + indicesIterator.next());
	}
	return SqlValidatorUtil.uniquify(
		nameList,
		rexBuilder.getTypeFactory().getTypeSystem().isSchemaCaseSensitive());
}
 
Example #17
Source File: DashBoardCollectorItemMapBuilder.java    From ExecDashboard with Apache License 2.0
public static final List<DashboardCollectorItem> getPipelineDashboardCollectorItems(Dataset<Row> ds) {
    List<DashboardCollectorItem> arr = new ArrayList<>();
    List<Row> dd = ds.collectAsList();
    dd.forEach(row -> {
        WrappedArray dashboardIds = row.getAs("dashboardIds");
        Iterator iterdashboardIds = dashboardIds.iterator();
        WrappedArray itemArray = row.getAs("collectorItems");
        Iterator iter = itemArray.iterator();

        for (int i = 0; i < dashboardIds.length(); i++) {
            String productName = row.getAs("productName");
            String componentName = row.getAs("componentName");
            String dashboardId = (String) ((GenericRowWithSchema) row.getAs("dashboardId")).values()[0];

            List<String> itemIds = new ArrayList<>();
            DashboardCollectorItem dashboardCollectorItem = null;

            dashboardCollectorItem = new DashboardCollectorItem();
            String grs = (String) iterdashboardIds.next();
            dashboardCollectorItem.setDashboardId(grs);
            GenericRowWithSchema collId = (GenericRowWithSchema) iter.next();
            itemIds.add((String) collId.get(0));
            dashboardCollectorItem.setItems(itemIds);
            String dashboardTitle = row.getAs("title");
            String key = productName + DELIMITER + componentName + DELIMITER + dashboardTitle;

            dashboardCollectorItem.setName(key);

            dashboardCollectorItem.setProductDashboardIds(dashboardId);
            arr.add(dashboardCollectorItem);
        }

    });
    return arr;
}
 
Example #18
Source File: DeepRDD.java    From deep-spark with Apache License 2.0
@Override
public Iterator<T> compute(Partition split, TaskContext context) {

    initExtractorClient();

    extractorClient.initIterator(split, config.getValue());

    context.addTaskCompletionListener(new AbstractFunction1<TaskContext, BoxedUnit>() {

        @Override
        public BoxedUnit apply(TaskContext v1) {
            extractorClient.close();
            return null;
        }
    });

    java.util.Iterator<T> iterator = new java.util.Iterator<T>() {

        @Override
        public boolean hasNext() {
            return extractorClient.hasNext();
        }

        @Override
        public T next() {
            return extractorClient.next();
        }

        @Override
        public void remove() {
            throw new DeepIOException(
                    "Method not implemented (and won't be implemented anytime soon!!!)");
        }
    };

    return new InterruptibleIterator<>(context, asScalaIterator(iterator));

}
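
The closing asScalaIterator call (statically imported, presumably from scala.collection.JavaConversions in this project) performs the reverse conversion: it wraps the anonymous java.util.Iterator so it can be passed to Spark's InterruptibleIterator, whose constructor expects a scala.collection.Iterator.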
 
Example #19
Source File: KafkaRecordReaderTest.java    From kangaroo with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testContinueItrMultipleIterations() throws Exception {
    // init split
    doReturn(mockConsumer).when(reader).getConsumer(split, conf);
    split.setEndOffset(4097);
    reader.initialize(split, context);

    // first iteration
    final Iterator<MessageAndOffset> mockIterator1 = mock(Iterator.class);
    when(mockConsumer.fetch(any(FetchRequest.class))).thenReturn(mockMessage);
    when(mockMessage.getErrorCode()).thenReturn(ErrorMapping.NoError());
    when(mockMessage.iterator()).thenReturn(mockIterator1);
    when(mockMessage.validBytes()).thenReturn(2048L);
    when(mockIterator1.hasNext()).thenReturn(true);

    assertTrue("Should be able to continue iterator!", reader.continueItr());

    // reset iterator for second iteration
    when(mockIterator1.hasNext()).thenReturn(false);
    final Iterator<MessageAndOffset> mockIterator2 = mock(Iterator.class);
    when(mockMessage.iterator()).thenReturn(mockIterator2);
    when(mockIterator2.hasNext()).thenReturn(true);

    assertTrue("Should be able to continue iterator!", reader.continueItr());

    // reset iterator for third iteration
    when(mockIterator2.hasNext()).thenReturn(false);
    final Iterator<MessageAndOffset> mockIterator3 = mock(Iterator.class);
    when(mockMessage.iterator()).thenReturn(mockIterator3);
    when(mockIterator3.hasNext()).thenReturn(true);
    when(mockMessage.validBytes()).thenReturn(1L);

    assertTrue("Should be able to continue iterator!", reader.continueItr());

    // out of bytes to read
    when(mockIterator3.hasNext()).thenReturn(false);
    assertFalse("Should be done with split!", reader.continueItr());
}
 
Example #20
Source File: SparkInterpreter.java    From Explorer with Apache License 2.0
public int getProgress() {
    int completedTasks = 0;
    int totalTasks = 0;

    DAGScheduler scheduler = context.getConnector().dagScheduler();
    if (scheduler == null) {
        return 0;
    }
    HashSet<ActiveJob> jobs = scheduler.activeJobs();
    if (jobs == null || jobs.size() == 0) {
        return 0;
    }
    Iterator<ActiveJob> it = jobs.iterator();
    while (it.hasNext()) {
        ActiveJob job = it.next();
        String g = (String) job.properties().get("spark.jobGroup.id");
        if (jobGroup.equals(g)) {
            int[] progressInfo = null;
            if (context.getConnector().version().startsWith("1.0")) {
                progressInfo = getProgressFromStage_1_0x(sparkListener, job.finalStage());
            } else {
                progressInfo = getProgressFromStage_1_1x(sparkListener, job.finalStage());
            }

            totalTasks += progressInfo[0];
            completedTasks += progressInfo[1];
        }
    }

    if (totalTasks == 0) {
        return 0;
    }
    return completedTasks * 100 / totalTasks;
}
 
Example #21
Source File: MLContextTest.java    From systemds with Apache License 2.0
@Test
public void testOutputRDDStringIJVDML() {
	System.out.println("MLContextTest - output RDD String IJV DML");

	String s = "M = matrix('1 2 3 4', rows=2, cols=2);";
	Script script = dml(s).out("M");
	MLResults results = ml.execute(script);
	RDD<String> rddStringIJV = results.getRDDStringIJV("M");
	Iterator<String> iterator = rddStringIJV.toLocalIterator();
	Assert.assertEquals("1 1 1.0", iterator.next());
	Assert.assertEquals("1 2 2.0", iterator.next());
	Assert.assertEquals("2 1 3.0", iterator.next());
	Assert.assertEquals("2 2 4.0", iterator.next());
}
 
Example #22
Source File: SparkSessionBuilderImpl.java    From beakerx with Apache License 2.0
@Override
public Map<String, Object> getSparkConfAsMap() {
  Map<String, Object> sparkConf = new HashMap<>();
  Iterator iterator = getConfigIterator();
  while (iterator.hasNext()) {
    Tuple2 x = (Tuple2) iterator.next();
    sparkConf.put((String) (x)._1, (x)._2);
  }
  return sparkConf;
}
 
Example #23
Source File: SparkSessionBuilderImpl.java    From beakerx with Apache License 2.0
public SparkConf getSparkConf() {
  SparkConf sparkConf = new SparkConf();
  Iterator iterator = getConfigIterator();
  while (iterator.hasNext()) {
    Tuple2 x = (Tuple2) iterator.next();
    sparkConf.set((String) (x)._1, (String) (x)._2);
  }
  return sparkConf;
}
 
Example #24
Source File: SparkSessionBuilderImpl.java    From beakerx with Apache License 2.0
private Iterator getConfigIterator() {
  try {
    Field options = getOptionsField(sparkSessionBuilder);
    options.setAccessible(true);
    return ((scala.collection.mutable.HashMap) options.get(sparkSessionBuilder)).iterator();
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
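
Since the builder's options field is a scala.collection.mutable.HashMap<String, String> (an assumption about the Spark internals being reflected on here), the helper could declare a typed return and spare both callers above their casts; a hypothetical variant:

  @SuppressWarnings("unchecked")
  private Iterator<Tuple2<String, String>> getConfigIterator() {
    try {
      Field options = getOptionsField(sparkSessionBuilder);
      options.setAccessible(true);
      // The cast is unchecked but safe under the assumption above.
      return ((scala.collection.mutable.HashMap<String, String>) options.get(sparkSessionBuilder)).iterator();
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }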
 
Example #25
Source File: GarmadonSparkStorageStatusListener.java    From garmadon with Apache License 2.0
/**
 * capture new rdd information
 */
@Override
public void onStageSubmitted(SparkListenerStageSubmitted event) {
    Iterator<RDDInfo> it = event.stageInfo().rddInfos().iterator();
    while (it.hasNext()) {
        RDDInfo info = it.next();
        if (info.storageLevel().isValid()) {
            liveRDDs.computeIfAbsent(info.id(), key -> new GarmadonRDDStorageInfo(info.name()));
        }
    }
}
 
Example #26
Source File: CsvSourceTest.java    From kylin-on-parquet-v2 with Apache License 2.0
@Test
public void testGetSourceDataFromLookupTable() {
    CubeManager cubeMgr = CubeManager.getInstance(getTestConfig());
    CubeInstance cube = cubeMgr.getCube(CUBE_NAME);
    Iterator<TableDesc> iterator = MetadataConverter.extractLookupTable(cube).iterator();
    while (iterator.hasNext()) {
        TableDesc lookup = iterator.next();
        NSparkCubingEngine.NSparkCubingSource cubingSource = new CsvSource().adaptToBuildEngine(NSparkCubingEngine.NSparkCubingSource.class);
        Dataset<Row> sourceData = cubingSource.getSourceData(lookup, ss, Maps.newHashMap());
        List<Row> rows = sourceData.collectAsList();
        Assert.assertTrue(rows != null && rows.size() > 0);
    }
}
 
Example #27
Source File: CollectionUtils.java    From Tok-Android with GNU General Public License v3.0
public static List convertArrayBufferToList(ArrayBuffer arrayBuffer) {
    List list = new ArrayList<>();
    Iterator it = arrayBuffer.iterator();
    while (it.hasNext()) {
        list.add(it.next());
    }
    return list;
}
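
A generified variant keeps the element type instead of falling back to a raw List (hypothetical; same logic as above):

public static <T> List<T> convertArrayBufferToList(ArrayBuffer<T> arrayBuffer) {
    List<T> list = new ArrayList<>(arrayBuffer.size());
    Iterator<T> it = arrayBuffer.iterator();
    while (it.hasNext()) {
        list.add(it.next());
    }
    return list;
}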
 
Example #28
Source File: KafkaAuthBinding.java    From incubator-sentry with Apache License 2.0
private boolean isPrivilegeForResource(TSentryPrivilege tSentryPrivilege, Resource resource) {
    final java.util.Iterator<TAuthorizable> authorizablesIterator = tSentryPrivilege.getAuthorizablesIterator();
    while (authorizablesIterator.hasNext()) {
        TAuthorizable tAuthorizable = authorizablesIterator.next();
        if (tAuthorizable.getType().equals(resource.resourceType().name())) {
            return true;
        }
    }
    return false;
}
 
Example #29
Source File: KafkaAuthBinding.java    From incubator-sentry with Apache License 2.0
private java.util.Map<Resource, scala.collection.immutable.Set<Acl>> rolePrivilegesToResourceAcls(java.util.Map<String, scala.collection.immutable.Set<TSentryPrivilege>> rolePrivilegesMap) {
    final java.util.Map<Resource, scala.collection.immutable.Set<Acl>> resourceAclsMap = new HashMap<>();
    for (String role : rolePrivilegesMap.keySet()) {
        scala.collection.immutable.Set<TSentryPrivilege> privileges = rolePrivilegesMap.get(role);
        final Iterator<TSentryPrivilege> iterator = privileges.iterator();
        while (iterator.hasNext()) {
            TSentryPrivilege privilege = iterator.next();
            final List<TAuthorizable> authorizables = privilege.getAuthorizables();
            String host = null;
            String operation = privilege.getAction();
            for (TAuthorizable tAuthorizable : authorizables) {
                if (tAuthorizable.getType().equals(KafkaAuthorizable.AuthorizableType.HOST.name())) {
                    host = tAuthorizable.getName();
                } else {
                    Resource resource = new Resource(ResourceType$.MODULE$.fromString(tAuthorizable.getType()), tAuthorizable.getName());
                    if (operation.equals("*")) {
                        operation = "All";
                    }
                    Acl acl = new Acl(new KafkaPrincipal("role", role), Allow$.MODULE$, host, Operation$.MODULE$.fromString(operation));
                    Set<Acl> newAclsJava = new HashSet<Acl>();
                    newAclsJava.add(acl);
                    addExistingAclsForResource(resourceAclsMap, resource, newAclsJava);
                    final scala.collection.mutable.Set<Acl> aclScala = JavaConversions.asScalaSet(newAclsJava);
                    resourceAclsMap.put(resource, aclScala.<Acl>toSet());
                }
            }
        }
    }

    return resourceAclsMap;
}
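
The implicit-oriented scala.collection.JavaConversions used above is deprecated as of Scala 2.12; the explicit equivalent of the final Java-to-Scala conversion would be (a sketch, assuming Scala 2.12's JavaConverters):

    // asScalaSet wraps the java.util.Set; toSet then builds an immutable Scala Set.
    scala.collection.immutable.Set<Acl> aclSet = JavaConverters.asScalaSet(newAclsJava).<Acl>toSet();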