Java Code Examples for com.google.common.collect.Lists.partition()

The following are Java code examples showing how to use the partition() method of the com.google.common.collect.Lists class, collected from open-source projects.
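Before the project examples, here is a minimal usage sketch. Lists.partition(list, size) splits a list into consecutive sublists of the given size, with the last sublist possibly shorter; the returned sublists are views backed by the original list. The class and variable names below are illustrative only, not taken from any of the projects listed.

import com.google.common.collect.Lists;

import java.util.Arrays;
import java.util.List;

public class PartitionExample {
    public static void main(String[] args) {
        List<Integer> numbers = Arrays.asList(1, 2, 3, 4, 5, 6, 7);

        // Consecutive sublists of at most 3 elements: [[1, 2, 3], [4, 5, 6], [7]]
        List<List<Integer>> batches = Lists.partition(numbers, 3);

        for (List<Integer> batch : batches) {
            System.out.println(batch);
        }
    }
}

Because the sublists are views, changes to the underlying list are reflected in the partitions. The project examples below mostly use this to process large collections in fixed-size batches.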
Example 1
Project: springboot-spwa-gae-demo   File: AbstractRepository.java
@Override
public int reindex(List<K> keys, int batchSize, ReindexOperation<E> reindexOperation) {
    int count = 0;
    List<List<K>> batches = Lists.partition(keys, batchSize);
    for (List<K> batchKeys : batches) {
        List<E> batch = get(batchKeys);
        batch = reindexOperation == null ? batch : reindexOperation.apply(batch);
        if (reindexOperation != null) {
            // we only re-save the batch when a re-index op is supplied, otherwise the data can't have changed.
            ofy().save().entities(batch).now();
        }
        if (shouldSearch()) {
            index(batch).complete();
        }
        count += batch.size();
        ofy().clear(); // Clear the Objectify cache to free memory for next batch
        Logger.info("Reindexed %d entities of type %s, %d of %d", batch.size(), entityType.getSimpleName(), count, keys.size());
    }
    return count;
}
 
Example 2
Project: apollo-custom   File: AppNamespaceServiceWithCache.java
private void updateAndDeleteCache() {
  List<Long> ids = Lists.newArrayList(appNamespaceIdCache.keySet());
  if (CollectionUtils.isEmpty(ids)) {
    return;
  }
  List<List<Long>> partitionIds = Lists.partition(ids, 500);
  for (List<Long> toRebuild : partitionIds) {
    Iterable<AppNamespace> appNamespaces = appNamespaceRepository.findAll(toRebuild);

    if (appNamespaces == null) {
      continue;
    }

    //handle updated
    Set<Long> foundIds = handleUpdatedAppNamespaces(appNamespaces);

    //handle deleted
    handleDeletedAppNamespaces(Sets.difference(Sets.newHashSet(toRebuild), foundIds));
  }
}
 
Example 3
Project: googlecloud-techtalk   File: AbstractRepository.java
@Override
public int reindex(List<K> keys, int batchSize, ReindexOperation<E> reindexOperation) {
    int count = 0;
    List<List<K>> batches = Lists.partition(keys, batchSize);
    for (List<K> batchKeys : batches) {
        List<E> batch = get(batchKeys);
        batch = reindexOperation == null ? batch : reindexOperation.apply(batch);
        if (reindexOperation != null) {
            // we only re-save the batch when a re-index op is supplied, otherwise the data can't have changed.
            ofy().save().entities(batch).now();
        }
        if (shouldSearch()) {
            index(batch).complete();
        }
        count += batch.size();
        ofy().clear(); // Clear the Objectify cache to free memory for next batch
        Logger.info("Reindexed %d entities of type %s, %d of %d", batch.size(), entityType.getSimpleName(), count, keys.size());
    }
    return count;
}
 
Example 4
Project: AppleSeed   File: AbstractRepository.java
@Override
public int reindex(List<K> keys, int batchSize, ReindexOperation<E> reindexOperation) {
    int count = 0;
    List<List<K>> batches = Lists.partition(keys, batchSize);
    for (List<K> batchKeys : batches) {
        List<E> batch = get(batchKeys);
        batch = reindexOperation == null ? batch : reindexOperation.apply(batch);
        if (reindexOperation != null) {
            // we only re-save the batch when a re-index op is supplied, otherwise the data can't have changed.
            ofy().save().entities(batch).now();
        }
        if (shouldSearch()) {
            index(batch).complete();
        }
        count += batch.size();
        ofy().clear(); // Clear the Objectify cache to free memory for next batch
        Logger.info("Reindexed %d entities of type %s, %d of %d", batch.size(), entityType.getSimpleName(), count, keys.size());
    }
    return count;
}
 
Example 5
Project: custom-bytecode-analyzer   File: ReportBuilder.java
private static List<String> generateHtmlChunks(List<ReportItem> reportItemList) {
  List<String> htmlChunks = new ArrayList<>();

  VelocityEngine velocityEngine = new VelocityEngine();
  Properties p = new Properties();
  p.setProperty("resource.loader", "class");
  p.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
  velocityEngine.init(p);
  Template template = velocityEngine.getTemplate("template/report_template.html");

  int maxItemsInReport = CliHelper.getMaxItemsInReport();
  List<List<ReportItem>> reportItemsChunks = Lists.partition(reportItemList, maxItemsInReport);

  for (List<ReportItem> reportItemsChunk : reportItemsChunks) {
    VelocityContext velocityContext = new VelocityContext();
    velocityContext.put("jarPath", CliHelper.getPathToAnalyze());
    velocityContext.put("ruleName", reportItemsChunk.get(0).getRuleName());
    velocityContext.put("reportItems", reportItemsChunk);

    StringWriter stringWriter = new StringWriter();
    template.merge(velocityContext, stringWriter);
    htmlChunks.add(stringWriter.toString());
  }
  return htmlChunks;
}
 
Example 6
Project: OperatieBRP   File: PersoonCacheConsumer.java
private void publiceerSelectieTaken(List<SelectieAutorisatieBericht> selectieAutorisatieBerichten, List<PersoonCache> caches) {
    final List<SelectieVerwerkTaakBericht> selectieTaken = new ArrayList<>();
    // split the persons into chunks of the configured size
    final List<List<PersoonCache>> persoonsLijstChunks = Lists.partition(caches, configuratieService.getBlobsPerSelectieTaak());
    // split the authorizations into chunks of the configured size
    final List<List<SelectieAutorisatieBericht>> autorisatieChunks =
            Lists.partition(selectieAutorisatieBerichten, configuratieService.getAutorisatiesPerSelectieTaak());
    // create selection tasks
    for (List<SelectieAutorisatieBericht> autorisatieChunk : autorisatieChunks) {
        for (List<PersoonCache> bundelChunk : persoonsLijstChunks) {
            SelectieVerwerkTaakBericht selectieTaak = maakSelectieTaak(bundelChunk, autorisatieChunk);
            selectieTaken.add(selectieTaak);
            selectieJobRunStatusService.getStatus().incrementEnGetVerwerkTaken();
        }
    }
    selectieTaakPublicatieService.publiceerSelectieTaak(selectieTaken);
}
 
Example 7
Project: sonar-css-plugin   File: RuleDescriptionsGenerator.java
private String generateHtmlCssFunctionTable(List<StandardFunction> standardFunctions) {
  StringBuilder html = new StringBuilder("<table style=\"border: 0;\">\n");
  List<List<StandardFunction>> subLists = Lists.partition(standardFunctions, 3);
  for (List<StandardFunction> subList : subLists) {
    html.append("<tr>");
    for (StandardFunction standardCssFunction : subList) {
      List<String> links = standardCssFunction.getLinks().stream().filter(f -> !f.contains("lesscss.org")).collect(Collectors.toList());
      html.append("<td style=\"border: 0; \">");
      if (!links.isEmpty()) {
        html.append("<a target=\"_blank\" href=\"").append(links.get(0)).append("\">");
      }
      html.append("<code>").append(standardCssFunction.getName()).append("</code>");
      if (!links.isEmpty()) {
        html.append("</a>");
      }
      html.append("</code>");
      for (int i = 1; i < links.size(); i++) {
        html.append("&nbsp;&nbsp;<a target=\"_blank\" href=\"").append(links.get(i)).append("\">#").append(i + 1).append("</a>");
      }
      html.append("</td>\n");
    }
    html.append("</tr>");
  }
  html.append("</table>\n");
  return html.toString();
}
 
Example 8
Project: AdvancedDataProfilingSeminar   File: Exporter.java
private static void export(final Configuration configuration,
    final TableInfo table) throws IOException {

  warnOnMultipleScans(configuration, table);
  final List<Attribute> attributes = CommonObjects.getTableToAttribute().get(table);
  final List<List<Attribute>> groups = Lists.partition(attributes, configuration.getOpenFileNr());

  int startIndex = 0;
  for (final List<Attribute> group : groups) {
    final List<Writer> writers = writeToDisk(configuration, table, group, startIndex);
    uniqueAndSort(configuration, writers);
    startIndex += group.size();
  }
}
 
Example 9
Project: tokenapp-backend   File: Etherscan.java
/**
 * This may take a while; make sure you obey the rate limits of the API provider.
 */
public BigInteger getBalances(List<String> contract) throws IOException {
    BigInteger result = BigInteger.ZERO;
    List<List<String>> part = Lists.partition(contract, 20);
    for(List<String> p:part) {
        result = result.add(get20Balances(p));
    }
    return result;
    //TODO:caching!
}
 
Example 10
Project: guava-mock   File: StatsTesting.java
/**
 * Creates a {@link PairedStatsAccumulator} filled with the given lists of {@code x} and {@code y}
 * values, which must be of the same size, added in groups of {@code partitionSize} using
 * {@link PairedStatsAccumulator#addAll(PairedStats)}.
 */
static PairedStatsAccumulator createPartitionedFilledPairedStatsAccumulator(
    List<Double> xValues, List<Double> yValues, int partitionSize) {
  checkArgument(xValues.size() == yValues.size());
  checkArgument(partitionSize > 0);
  PairedStatsAccumulator accumulator = new PairedStatsAccumulator();
  List<List<Double>> xPartitions = Lists.partition(xValues, partitionSize);
  List<List<Double>> yPartitions = Lists.partition(yValues, partitionSize);
  for (int index = 0; index < xPartitions.size(); index++) {
    accumulator.addAll(createPairedStatsOf(xPartitions.get(index), yPartitions.get(index)));
  }
  return accumulator;
}
 
Example 11
Project: QDrill   File: FileSystemPartitionDescriptor.java
@Override
protected void createPartitionSublists() {
  List<String> fileLocations = ((FormatSelection) scanRel.getDrillTable().getSelection()).getAsFiles();
  List<PartitionLocation> locations = new LinkedList<>();
  for (String file: fileLocations) {
    locations.add(new DFSPartitionLocation(MAX_NESTED_SUBDIRS, getBaseTableLocation(), file));
  }
  locationSuperList = Lists.partition(locations, PartitionDescriptor.PARTITION_BATCH_SIZE);
  sublistsCreated = true;
}
 
Example 12
Project: QDrill   File: ParquetPartitionDescriptor.java
@Override
protected void createPartitionSublists() {
  Set<String> fileLocations = ((ParquetGroupScan) scanRel.getGroupScan()).getFileSet();
  List<PartitionLocation> locations = new LinkedList<>();
  for (String file: fileLocations) {
    locations.add(new DFSPartitionLocation(MAX_NESTED_SUBDIRS, getBaseTableLocation(), file));
  }
  locationSuperList = Lists.partition(locations, PartitionDescriptor.PARTITION_BATCH_SIZE);
  sublistsCreated = true;
}
 
Example 13
Project: QDrill   File: HivePartitionDescriptor.java
@Override
protected void createPartitionSublists() {
  List<PartitionLocation> locations = new LinkedList<>();
  HiveReadEntry origEntry = ((HiveScan) scanRel.getGroupScan()).hiveReadEntry;
  for (Partition partition: origEntry.getPartitions()) {
    locations.add(new HivePartitionLocation(partition.getValues(), partition.getSd().getLocation()));
  }
  locationSuperList = Lists.partition(locations, PartitionDescriptor.PARTITION_BATCH_SIZE);
  sublistsCreated = true;
}
 
Example 14
Project: OperatieBRP   File: PersoonsBeeldenServiceImpl.java
private void zetOpdrachtenOpQueue(SelectieVerwerkTaakBericht selectieTaak, int verwerkerPoolSize,
                                  BlockingQueue<MaakPersoonslijstBatchOpdracht> persoonsBeeldTaakQueue)
        throws InterruptedException {
    // put the commands on the queue
    final List<List<SelectiePersoonBericht>> bundelChunks = Lists.partition(selectieTaak.getPersonen(), verwerkerPoolSize);
    for (List<SelectiePersoonBericht> bundelChunk : bundelChunks) {
        final MaakPersoonslijstBatchOpdracht maakPersoonslijstBatchOpdracht = new MaakPersoonslijstBatchOpdracht();
        maakPersoonslijstBatchOpdracht.setCaches(bundelChunk);
        persoonsBeeldTaakQueue.put(maakPersoonslijstBatchOpdracht);
    }
}
 
Example 15
Project: spark-cassandra-poc   File: CassandraConnection.java
@Override
public void insertVideoEvents(List<VideoViewEvent> videoViewEvents) {
	try (Cluster cassandraConnection = buildConnection()) {
		try (Session session = cassandraConnection.connect()) {

			List<List<VideoViewEvent>> partition = Lists.partition(videoViewEvents, batchSize);
			int total = 0;
			for (List<VideoViewEvent> list : partition) {

				String q = "BEGIN BATCH \n";

				for (VideoViewEvent videoViewEvent : list) {
					String insertQuery = "insert into wootag.video_view (" + "user_id," + "	video_id, "
							+ "	session_id, " + "	event_start_timestamp, "
							+ "	view_duration_in_second) VALUES ";
					insertQuery += "\n (" + "'" + videoViewEvent.getUserId() + "'" + "," + "'"
							+ videoViewEvent.getVideoId() + "'" + "," + "'" + videoViewEvent.getSessionId() + "'"
							+ "," + videoViewEvent.getEventStartTimestamp() + ","
							+ videoViewEvent.getViewDurationInSeconds() + ");\n";
					q += insertQuery;
				}
				session.execute(q + " APPLY BATCH; ");
				total += batchSize;
				System.out.println("Executing batch of " + batchSize + ", Total : " + total);
			}
		}
	}
}
 
Example 16
Project: spark-cassandra-poc   File: CassandraConnection.java
@Override
public void insertRows(List<org.apache.spark.sql.Row> collectAsList, String tableName, List<String> columns)
		throws QueryExecutionException {

	try (Cluster cassandraConnection = buildConnection()) {
		try (Session session = cassandraConnection.connect()) {
			System.out.println("columns : " + columns);
			List<List<org.apache.spark.sql.Row>> partition = Lists.partition(collectAsList, batchSize);
			int total = 0;
			for (List<org.apache.spark.sql.Row> list : partition) {

				String q = "BEGIN BATCH \n";

				for (org.apache.spark.sql.Row row : list) {
					String insertQuery = "insert into wootag." + tableName + " (" + columns.get(0) + ", "
							+ columns.get(1) + ", " + columns.get(2) + ") VALUES ";
					insertQuery += "\n (" + "'" + row.getString(0) + "'" + "," + row.getLong(1) + ","
							+ row.getLong(2) + ");\n";
					q += insertQuery;
				}
				session.execute(q + " APPLY BATCH; ");
				total += batchSize;
				System.out.println("Executing batch of " + batchSize + ", Total : " + total);
			}
		}
	}

}
 
Example 17
Project: sonar-css-plugin   File: RuleDescriptionsGenerator.java
private String generateHtmlTableFromListOfStrings(List<String> elements) {
  StringBuilder html = new StringBuilder("<table style=\"border: 0;\">\n");
  List<List<String>> subLists = Lists.partition(elements, 3);
  for (List<String> subList : subLists) {
    html.append("<tr>");
    for (String element : subList) {
      html.append("<td style=\"border: 0; \">");
      html.append("<code>").append(element).append("</code>");
      html.append("</td>\n");
    }
    html.append("</tr>");
  }
  html.append("</table>\n");
  return html.toString();
}
 
Example 18
Project: xsharing-services-router   File: RasterManagerImpl.java
/**
 * Create raster according to specified parameters and save it to the database
 * @param minLon lower boundary on longitude
 * @param minLat lower boundary on latitude
 * @param maxLon upper boundary on longitude
 * @param maxLat upper boundary on latitude
 * @param alpha granularity coefficient
 */
@Override
public void createRaster(double minLon, double minLat, double maxLon, double maxLat, double alpha) {
    int latCount = ((int) Math.floor((maxLat - minLat) / alpha));
    int lonCount = ((int) Math.floor((maxLon - minLon) / alpha));

    pointsLock.lock();
    try {
        for (int i = 0; i <= latCount; i++) {
            for (int j = 0; j <= lonCount; j++) {
                double lon = minLon + j * alpha;
                double lat = minLat + i * alpha;
                GeoCoord point = new GeoCoord(lon, lat);
                points.put(point.hashCode(), new RasterPoint(point));
            }
        }
    } finally {
        pointsLock.unlock();
    }

    log.info("Created raster list with {} entries!", points.size());

    int batchSize = 100;

    List<RasterPoint> allPoints = new ArrayList<>(points.values());
    List<List<RasterPoint>> batchList = Lists.partition(allPoints, batchSize);

    log.info("Will insert raster list into database in {} batches each with {} size", batchList.size(), batchSize);

    for (List<RasterPoint> batch : batchList) {
        insertOneBatch(batch);
    }

    log.info("Saved raster list to database");
}
 
Example 19
Project: googles-monorepo-demo   File: StatsTesting.java
/**
 * Creates a {@link PairedStatsAccumulator} filled with the given lists of {@code x} and {@code y}
 * values, which must be of the same size, added in groups of {@code partitionSize} using
 * {@link PairedStatsAccumulator#addAll(PairedStats)}.
 */
static PairedStatsAccumulator createPartitionedFilledPairedStatsAccumulator(
    List<Double> xValues, List<Double> yValues, int partitionSize) {
  checkArgument(xValues.size() == yValues.size());
  checkArgument(partitionSize > 0);
  PairedStatsAccumulator accumulator = new PairedStatsAccumulator();
  List<List<Double>> xPartitions = Lists.partition(xValues, partitionSize);
  List<List<Double>> yPartitions = Lists.partition(yValues, partitionSize);
  for (int index = 0; index < xPartitions.size(); index++) {
    accumulator.addAll(createPairedStatsOf(xPartitions.get(index), yPartitions.get(index)));
  }
  return accumulator;
}
 
Example 20
Project: r8   File: MainDexListTests.java
@Test
public void deterministicTest() throws Exception {
  // Synthesize a dex containing a few empty classes including some in the default package.
  // Everything can fit easily in a single dex file.
  ImmutableList<String> classes = new ImmutableList.Builder<String>()
      .add("A")
      .add("B")
      .add("C")
      .add("D")
      .add("E")
      .add("F")
      .add("A1")
      .add("A2")
      .add("A3")
      .add("A4")
      .add("A5")
      .add("maindexlist.A")
      .add("maindexlist.B")
      .add("maindexlist.C")
      .add("maindexlist.D")
      .add("maindexlist.E")
      .add("maindexlist.F")
      .add("maindexlist.A1")
      .add("maindexlist.A2")
      .add("maindexlist.A3")
      .add("maindexlist.A4")
      .add("maindexlist.A5")
      .build();

  JasminBuilder jasminBuilder = new JasminBuilder();
  for (String name : classes) {
    jasminBuilder.addClass(name);
  }
  Path input = temp.newFolder().toPath().resolve("input.zip");
  ToolHelper.runR8(jasminBuilder.build()).writeToZip(input, OutputMode.Indexed);

  // Test with empty main dex list.
  runDeterministicTest(input, null, true);

  // Test with main-dex list with all classes.
  runDeterministicTest(input, classes, true);

  // Test with main-dex list with first and second half of the classes.
  List<List<String>> partitions = Lists.partition(classes, classes.size() / 2);
  runDeterministicTest(input, partitions.get(0), false);
  runDeterministicTest(input, partitions.get(1), false);

  // Test with main-dex list with every second of the classes.
  runDeterministicTest(input,
      IntStream.range(0, classes.size())
          .filter(n -> n % 2 == 0)
          .mapToObj(classes::get)
          .collect(Collectors.toList()), false);
  runDeterministicTest(input,
      IntStream.range(0, classes.size())
          .filter(n -> n % 2 == 1)
          .mapToObj(classes::get)
          .collect(Collectors.toList()), false);
}