Java Code Examples for java.util.Deque#clear()

The following examples show how to use java.util.Deque#clear(). They are taken from open-source projects; the source file, project, and license are noted above each example.
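
Before the project examples, here is a minimal, self-contained sketch of what clear() does (the class and variable names below are illustrative only): Deque inherits clear() from java.util.Collection, and calling it removes every element while leaving the deque itself usable for further additions.

import java.util.ArrayDeque;
import java.util.Deque;

public class DequeClearDemo {
    public static void main(String[] args) {
        Deque<String> deque = new ArrayDeque<>();
        deque.addLast("first");
        deque.addLast("second");
        deque.addFirst("zeroth");
        System.out.println(deque.size());    // 3

        // clear() removes all elements; the deque remains usable afterwards.
        deque.clear();
        System.out.println(deque.isEmpty()); // true

        deque.addLast("reused after clear");
        System.out.println(deque.size());    // 1
    }
}
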
Example 1
Source File: VplsOperationManagerTest.java    From onos with Apache License 2.0
/**
 * Submits the same operation twice to the manager; the manager should
 * ignore the duplicated operation.
 */
@Test
@Ignore("Test is brittle - revisit")
public void testDuplicateOperationInQueue() {
    VplsData vplsData = VplsData.of(VPLS1);
    vplsData.addInterfaces(ImmutableSet.of(V100H1, V100H2));

    VplsOperation vplsOperation = VplsOperation.of(vplsData,
                                                   VplsOperation.Operation.ADD);

    vplsOperationManager.submit(vplsOperation);
    vplsOperationManager.submit(vplsOperation);
    Deque<VplsOperation> opQueue = vplsOperationManager.pendingVplsOperations.get(VPLS1);
    assertEquals(1, opQueue.size());

    // Clear the operation queue before the scheduler processes it
    opQueue.clear();
}
 
Example 2
Source File: BuckEventOrderer.java    From buck with Apache License 2.0
public void add(T buckEvent) {
  Deque<T> queue = getQueueForEvent(buckEvent);
  boolean shouldAddToEventQueue = queue.isEmpty();
  if (!queue.isEmpty() && queue.getLast().getNanoTime() > buckEvent.getNanoTime()) {
    // We assume inserting events configured at a time in the past is very rare.
    oldestEventQueue.remove(queue);
    List<T> mergedEventsList = new ArrayList<>(queue.size() + 1);
    while (!queue.isEmpty() && queue.getFirst().getNanoTime() <= buckEvent.getNanoTime()) {
      mergedEventsList.add(queue.removeFirst());
    }
    mergedEventsList.add(buckEvent);
    mergedEventsList.addAll(queue);
    queue.clear();
    queue.addAll(mergedEventsList);
    oldestEventQueue.add(queue);
  } else {
    queue.add(buckEvent);
  }
  if (shouldAddToEventQueue) {
    oldestEventQueue.add(queue);
  }
  if (maximumNanoTime < buckEvent.getNanoTime()) {
    maximumNanoTime = buckEvent.getNanoTime();
  }
  dispatchEventsOlderThan(maximumNanoTime - maximumSkewNanos);
}
 
Example 3
Source File: ExecuteMethodChecker.java    From lastaflute with Apache License 2.0
protected void doCheckJsonBeanValidator(Class<?> jsonBeanType, Map<String, Class<?>> genericMap) {
    final Deque<String> pathDeque = new LinkedList<String>(); // recycled
    final Set<Class<?>> mismatchedCheckedTypeSet = DfCollectionUtil.newHashSet(jsonBeanType);
    final Set<Class<?>> lonelyCheckedTypeSet = DfCollectionUtil.newHashSet(jsonBeanType);
    final BeanDesc beanDesc;
    try {
        beanDesc = BeanDescFactory.getBeanDesc(jsonBeanType);
    } catch (RuntimeException e) { // may be setAccessible(true) failure
        throwExecuteMethodJsonBeanDescFailureException(jsonBeanType, genericMap, e);
        return; // unreachable
    }
    final int pdSide = beanDesc.getPropertyDescSize();
    for (int i = 0; i < pdSide; i++) {
        final PropertyDesc pd = beanDesc.getPropertyDesc(i);
        final Field field = pd.getField();
        if (field != null) {
            pathDeque.clear(); // to recycle
            checkJsonBeanMismatchedValidatorAnnotation(jsonBeanType, pd, field, pathDeque, mismatchedCheckedTypeSet, genericMap);
            pathDeque.clear(); // to recycle
            checkJsonBeanLonelyValidatorAnnotation(jsonBeanType, pd, field, pathDeque, lonelyCheckedTypeSet, genericMap);
        }
    }
}
 
Example 4
Source File: ParseIDHandler.java    From J-Kinopoisk2IMDB with Apache License 2.0
/**
 * Finds the movie matching the given movie argument using {@link MovieComparator},
 * returning {@link Optional#empty()} on failure.
 *
 * @param movie  Movie to which a similar one should be found
 * @param movies Deque in which to perform the search
 * @return Optional containing the matching movie, if found
 */
private Optional<Movie> findMatchingMovie(@NonNull final Movie movie, @NonNull final Deque<Movie> movies) {
    while (!movies.isEmpty()) {
        Movie imdbMovie = movies.poll();

        if (movieComparator.areEqual(movie, imdbMovie)) {
            movies.clear();
            return Optional.of(imdbMovie);
        }
    }

    return Optional.empty();
}
 
Example 5
Source File: MemoryTransactionService.java    From sql-layer with GNU Affero General Public License v3.0
private static void clearStack(Session session, Session.StackKey<Callback> key) {
    Deque<Callback> stack = session.get(key);
    if(stack != null) {
        stack.clear();
    }
}
 
Example 6
Source File: WriteXMLResult.java    From nifi with Apache License 2.0
private void writeAllTags(Deque<String> tagsToOpen) throws XMLStreamException {
    for (String tagName : tagsToOpen) {
        writer.writeStartElement(escapeTagName(tagName));
    }
    tagsToOpen.clear();
}
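
The writeAllTags method above shows a common iterate-then-clear pattern: process every queued element, then empty the deque in a single call. Here is a minimal generic sketch of the same flush idea (the PendingBuffer class is illustrative and not part of NiFi):

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.function.Consumer;

final class PendingBuffer<T> {
    private final Deque<T> pending = new ArrayDeque<>();

    void add(T item) {
        pending.addLast(item);
    }

    // Processes every buffered item in insertion order, then empties the buffer.
    void flush(Consumer<? super T> action) {
        for (T item : pending) {
            action.accept(item);
        }
        pending.clear();
    }
}
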
 
Example 7
Source File: SDeque.java    From jane with GNU Lesser General Public License v3.0
public void cloneTo(Deque<V> deque)
{
	deque.clear();
	Util.appendDeep(_deque, deque);
}
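
The cloneTo method above illustrates another common use: clear() the target deque and then refill it, replacing its contents wholesale. A minimal generic sketch of that idiom using plain addAll in place of the project's Util.appendDeep (the DequeUtil class and replaceContents method are illustrative, not part of jane):

import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Deque;
import java.util.List;

final class DequeUtil {
    // Replaces the contents of target with the elements of source (shallow copy).
    static <T> void replaceContents(Deque<T> target, Collection<? extends T> source) {
        target.clear();
        target.addAll(source);
    }

    public static void main(String[] args) {
        Deque<Integer> target = new ArrayDeque<>(List.of(9, 9, 9));
        replaceContents(target, List.of(1, 2, 3));
        System.out.println(target); // [1, 2, 3]
    }
}
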
 
Example 8
Source File: LinkedDequeTest.java    From caffeine with Apache License 2.0
@Test(dataProvider = "full")
public void clear_whenPopulated(Deque<?> deque) {
  deque.clear();
  assertThat(deque, is(deeplyEmpty()));
}
 
Example 9
Source File: LinkedDequeTest.java    From caffeine with Apache License 2.0
@Test(dataProvider = "empty")
public void clear_whenEmpty(Deque<?> deque) {
  deque.clear();
  assertThat(deque, is(deeplyEmpty()));
}
 
Example 10
Source File: LinkedDequeTest.java    From concurrentlinkedhashmap with Apache License 2.0
@Test(dataProvider = "warmedDeque")
public void clear_whenPopulated(Deque<?> deque) {
  deque.clear();
  assertThat(deque, is(emptyCollection()));
}
 
Example 11
Source File: LinkedDequeTest.java    From concurrentlinkedhashmap with Apache License 2.0
@Test(dataProvider = "emptyDeque")
public void clear_whenEmpty(Deque<?> deque) {
  deque.clear();
  assertThat(deque, is(emptyCollection()));
}
 
Example 12
Source File: FDBTransactionService.java    From sql-layer with GNU Affero General Public License v3.0
protected void clearStack(Session session, Session.StackKey<Callback> key) {
    Deque<Callback> stack = session.get(key);
    if(stack != null) {
        stack.clear();
    }
}
 
Example 13
Source File: MarkListWalker.java    From perfmark with Apache License 2.0
private static void createFakes(
    Deque<? super Mark> fakeStarts,
    Deque<? super Mark> fakeEnds,
    Set<? super Mark> unmatchedPairMarks,
    List<Mark> marks,
    long nowNanoTime) {
  final Deque<Mark> unmatchedMarks = new ArrayDeque<>();
  long[] nanoTimeBounds = new long[2]; // first, last
  nanoTimeBounds[0] = nowNanoTime; // forces each subsequent overwrite to succeed.
  nanoTimeBounds[1] = nowNanoTime;

  loop:
  for (Mark mark : marks) {
    setNanoTimeBounds(nanoTimeBounds, mark);
    switch (mark.getOperation().getOpType()) {
      case TASK_START:
        unmatchedMarks.addLast(mark);
        continue loop;
      case TASK_END:
        if (!unmatchedMarks.isEmpty()) {
          // TODO: maybe double check the tags and task names match
          unmatchedMarks.removeLast();
        } else {
          fakeStarts.addFirst(createFakeStart(mark, nanoTimeBounds[0]));
          unmatchedPairMarks.add(mark);
        }
        continue loop;
      case EVENT:
      case LINK:
      case TAG:
      case MARK:
        continue loop;
      case NONE:
        break;
    }
    throw new AssertionError();
  }
  for (Mark unmatchedMark : unmatchedMarks) {
    fakeEnds.addFirst(createFakeEnd(unmatchedMark, nanoTimeBounds[1]));
    unmatchedPairMarks.add(unmatchedMark);
  }
  unmatchedMarks.clear();
}
 
Example 14
Source File: PSClearCommand.java    From latexdraw with GNU General Public License v3.0
@Override
public void execute(final Deque<Double> stack, final double x) {
	stack.clear();
}
 
Example 15
Source File: CombineRunnersTest.java    From beam with Apache License 2.0
/**
 * Create an Extract Outputs function that is given keyed accumulators and validates that the
 * accumulators were turned into the output type.
 */
@Test
public void testExtractOutputs() throws Exception {
  // Create a map of consumers and an output target to check output values.
  MetricsContainerStepMap metricsContainerRegistry = new MetricsContainerStepMap();
  PCollectionConsumerRegistry consumers =
      new PCollectionConsumerRegistry(
          metricsContainerRegistry, mock(ExecutionStateTracker.class));
  Deque<WindowedValue<KV<String, Integer>>> mainOutputValues = new ArrayDeque<>();
  consumers.register(
      Iterables.getOnlyElement(pTransform.getOutputsMap().values()),
      TEST_COMBINE_ID,
      (FnDataReceiver)
          (FnDataReceiver<WindowedValue<KV<String, Integer>>>) mainOutputValues::add);

  PTransformFunctionRegistry startFunctionRegistry =
      new PTransformFunctionRegistry(
          mock(MetricsContainerStepMap.class), mock(ExecutionStateTracker.class), "start");
  PTransformFunctionRegistry finishFunctionRegistry =
      new PTransformFunctionRegistry(
          mock(MetricsContainerStepMap.class), mock(ExecutionStateTracker.class), "finish");

  // Create runner.
  MapFnRunners.forValueMapFnFactory(CombineRunners::createExtractOutputsMapFunction)
      .createRunnerForPTransform(
          PipelineOptionsFactory.create(),
          null /* beamFnDataClient */,
          null /* beamFnStateClient */,
          null /* beamFnTimerClient */,
          TEST_COMBINE_ID,
          pTransform,
          null,
          Collections.emptyMap(),
          Collections.emptyMap(),
          Collections.emptyMap(),
          consumers,
          startFunctionRegistry,
          finishFunctionRegistry,
          null, /* tearDownRegistry */
          null /* addProgressRequestCallback */,
          null /* splitListener */,
          null /* bundleFinalizer */);

  assertThat(startFunctionRegistry.getFunctions(), empty());
  assertThat(finishFunctionRegistry.getFunctions(), empty());

  // Send elements to runner and check outputs.
  mainOutputValues.clear();
  assertThat(consumers.keySet(), containsInAnyOrder(inputPCollectionId, outputPCollectionId));

  FnDataReceiver<WindowedValue<?>> input = consumers.getMultiplexingConsumer(inputPCollectionId);
  input.accept(valueInGlobalWindow(KV.of("A", 9)));
  input.accept(valueInGlobalWindow(KV.of("B", 5)));
  input.accept(valueInGlobalWindow(KV.of("C", 7)));

  assertThat(
      mainOutputValues,
      contains(
          valueInGlobalWindow(KV.of("A", -9)),
          valueInGlobalWindow(KV.of("B", -5)),
          valueInGlobalWindow(KV.of("C", -7))));
}
 
Example 16
Source File: CombineRunnersTest.java    From beam with Apache License 2.0
/**
 * Create a Merge Accumulators function that is given keyed lists of accumulators and validates
 * that the accumulators of each list were merged.
 */
@Test
public void testMergeAccumulators() throws Exception {
  // Create a map of consumers and an output target to check output values.
  MetricsContainerStepMap metricsContainerRegistry = new MetricsContainerStepMap();
  PCollectionConsumerRegistry consumers =
      new PCollectionConsumerRegistry(
          metricsContainerRegistry, mock(ExecutionStateTracker.class));
  Deque<WindowedValue<KV<String, Integer>>> mainOutputValues = new ArrayDeque<>();
  consumers.register(
      Iterables.getOnlyElement(pTransform.getOutputsMap().values()),
      TEST_COMBINE_ID,
      (FnDataReceiver)
          (FnDataReceiver<WindowedValue<KV<String, Integer>>>) mainOutputValues::add);

  PTransformFunctionRegistry startFunctionRegistry =
      new PTransformFunctionRegistry(
          mock(MetricsContainerStepMap.class), mock(ExecutionStateTracker.class), "start");
  PTransformFunctionRegistry finishFunctionRegistry =
      new PTransformFunctionRegistry(
          mock(MetricsContainerStepMap.class), mock(ExecutionStateTracker.class), "finish");

  // Create runner.
  MapFnRunners.forValueMapFnFactory(CombineRunners::createMergeAccumulatorsMapFunction)
      .createRunnerForPTransform(
          PipelineOptionsFactory.create(),
          null /* beamFnDataClient */,
          null /* beamFnStateClient */,
          null /* beamFnTimerClient */,
          TEST_COMBINE_ID,
          pTransform,
          null,
          Collections.emptyMap(),
          Collections.emptyMap(),
          Collections.emptyMap(),
          consumers,
          startFunctionRegistry,
          finishFunctionRegistry,
          null, /* tearDownRegistry */
          null /* addProgressRequestCallback */,
          null /* splitListener */,
          null /* bundleFinalizer */);

  assertThat(startFunctionRegistry.getFunctions(), empty());
  assertThat(finishFunctionRegistry.getFunctions(), empty());

  // Send elements to runner and check outputs.
  mainOutputValues.clear();
  assertThat(consumers.keySet(), containsInAnyOrder(inputPCollectionId, outputPCollectionId));

  FnDataReceiver<WindowedValue<?>> input = consumers.getMultiplexingConsumer(inputPCollectionId);
  input.accept(valueInGlobalWindow(KV.of("A", Arrays.asList(1, 2, 6))));
  input.accept(valueInGlobalWindow(KV.of("B", Arrays.asList(2, 3))));
  input.accept(valueInGlobalWindow(KV.of("C", Arrays.asList(5, 2))));

  assertThat(
      mainOutputValues,
      contains(
          valueInGlobalWindow(KV.of("A", 9)),
          valueInGlobalWindow(KV.of("B", 5)),
          valueInGlobalWindow(KV.of("C", 7))));
}
 
Example 17
Source File: CombineRunnersTest.java    From beam with Apache License 2.0
/**
 * Create a Precombine that is given keyed elements and validates that the output
 * elements' values are accumulators that were correctly derived from the input.
 */
@Test
public void testPrecombine() throws Exception {
  // Create a map of consumers and an output target to check output values.
  MetricsContainerStepMap metricsContainerRegistry = new MetricsContainerStepMap();
  PCollectionConsumerRegistry consumers =
      new PCollectionConsumerRegistry(
          metricsContainerRegistry, mock(ExecutionStateTracker.class));
  Deque<WindowedValue<KV<String, Integer>>> mainOutputValues = new ArrayDeque<>();
  consumers.register(
      Iterables.getOnlyElement(pTransform.getOutputsMap().values()),
      TEST_COMBINE_ID,
      (FnDataReceiver)
          (FnDataReceiver<WindowedValue<KV<String, Integer>>>) mainOutputValues::add);

  PTransformFunctionRegistry startFunctionRegistry =
      new PTransformFunctionRegistry(
          mock(MetricsContainerStepMap.class), mock(ExecutionStateTracker.class), "start");
  PTransformFunctionRegistry finishFunctionRegistry =
      new PTransformFunctionRegistry(
          mock(MetricsContainerStepMap.class), mock(ExecutionStateTracker.class), "finish");

  // Create runner.
  new CombineRunners.PrecombineFactory<>()
      .createRunnerForPTransform(
          PipelineOptionsFactory.create(),
          null /* beamFnDataClient */,
          null /* beamFnStateClient */,
          null /* beamFnTimerClient */,
          TEST_COMBINE_ID,
          pTransform,
          null,
          pProto.getComponents().getPcollectionsMap(),
          pProto.getComponents().getCodersMap(),
          pProto.getComponents().getWindowingStrategiesMap(),
          consumers,
          startFunctionRegistry,
          finishFunctionRegistry,
          null, /* tearDownRegistry */
          null /* addProgressRequestCallback */,
          null /* splitListener */,
          null /* bundleFinalizer */);

  Iterables.getOnlyElement(startFunctionRegistry.getFunctions()).run();

  // Send elements to runner and check outputs.
  mainOutputValues.clear();
  assertThat(consumers.keySet(), containsInAnyOrder(inputPCollectionId, outputPCollectionId));

  FnDataReceiver<WindowedValue<?>> input = consumers.getMultiplexingConsumer(inputPCollectionId);
  input.accept(valueInGlobalWindow(KV.of("A", "1")));
  input.accept(valueInGlobalWindow(KV.of("A", "2")));
  input.accept(valueInGlobalWindow(KV.of("A", "6")));
  input.accept(valueInGlobalWindow(KV.of("B", "2")));
  input.accept(valueInGlobalWindow(KV.of("C", "3")));

  Iterables.getOnlyElement(finishFunctionRegistry.getFunctions()).run();

  // Check that all values for "A" were converted to accumulators regardless of how they were
  // combined by the Precombine optimization.
  Integer sum = 0;
  for (WindowedValue<KV<String, Integer>> outputValue : mainOutputValues) {
    if ("A".equals(outputValue.getValue().getKey())) {
      sum += outputValue.getValue().getValue();
    }
  }
  assertThat(sum, equalTo(9));

  // Check that elements for "B" and "C" are present as well.
  mainOutputValues.removeIf(elem -> "A".equals(elem.getValue().getKey()));
  assertThat(
      mainOutputValues,
      containsInAnyOrder(valueInGlobalWindow(KV.of("B", 2)), valueInGlobalWindow(KV.of("C", 3))));
}
 
Example 18
Source File: LinkedDequeTest.java    From multiway-pool with Apache License 2.0
@Test(dataProvider = "emptyDeque")
public void clear_whenEmpty(Deque<?> deque) {
  deque.clear();
  assertThat(deque, is(empty()));
}
 
Example 19
Source File: Logger.java    From rcrs-server with BSD 3-Clause "New" or "Revised" License
/**
   Set the log context for this thread and all child threads.
   @param context The new log context.
*/
public static void setLogContext(String context) {
    Deque<org.apache.log4j.Logger> queue = LOG.get();
    queue.clear();
    queue.addLast(LogManager.getLogger(context));
}
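
Example 19 pairs clear() with addLast() to reset a per-thread logger context to a single element. The same clear-then-seed pattern turns up elsewhere; here is a short illustrative sketch (the NavigationStack class is hypothetical, not from rcrs-server):

import java.util.ArrayDeque;
import java.util.Deque;

public class NavigationStack {
    private final Deque<String> screens = new ArrayDeque<>();

    // Discards the whole navigation history and starts over at a new root screen.
    public void resetTo(String rootScreen) {
        screens.clear();
        screens.addLast(rootScreen);
    }

    // Returns the screen most recently navigated to, or null if the stack is empty.
    public String current() {
        return screens.peekLast();
    }
}
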
 
Example 20
Source File: LinkedDequeTest.java    From multiway-pool with Apache License 2.0
@Test(dataProvider = "warmedDeque")
public void clear_whenPopulated(Deque<?> deque) {
  deque.clear();
  assertThat(deque, is(empty()));
}