Java Code Examples for java.util.Queue#add()

The following examples show how to use java.util.Queue#add(). They are taken from open-source projects; the source file, project, and license are listed above each example.
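Before looking at the project code, a minimal sketch of the add() contract is worth keeping in mind (the variable names below are for illustration only): add() returns true on success, but on a capacity-restricted queue it throws an IllegalStateException when no space is available, whereas offer() simply returns false. The snippet assumes java.util.Queue, java.util.LinkedList and java.util.concurrent.ArrayBlockingQueue are imported.

Queue<String> unbounded = new LinkedList<>();
unbounded.add("a");                       // unbounded queue: always succeeds and returns true

Queue<String> bounded = new ArrayBlockingQueue<>(1);
bounded.add("a");                         // fits, returns true
System.out.println(bounded.offer("b"));   // prints false: the queue is full
// bounded.add("b");                      // would throw IllegalStateException ("Queue full")
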
Example 1
Source File: DataGeneratorBB.java    From gemfirexd-oss with Apache License 2.0
public static Queue<Map<String, Object>> getRowsFromBB(String fullTableName) {
  final Queue<Map<String, Object>> tableRowsQueue = new LinkedBlockingQueue<Map<String, Object>>();
  String key = TABLE_ROW_QUEUE_PREFIX + fullTableName.toUpperCase();
  bbInstance.getSharedLock().lock();
  try {
    Queue<Map<String, Object>> allRowsQueue = (Queue<Map<String, Object>>)bbInstance
        .getSharedMap().get(key);
    int queueSize = allRowsQueue.size();
    if (queueSize < 500) {
      setRowsLowThresholdReached(fullTableName, true);
    }
    // Return at most 100 rows per call.
    int rowsToReturn = queueSize > 100 ? 100 : queueSize;
    for (int i = 0; i < rowsToReturn; i++) {
      tableRowsQueue.add(allRowsQueue.poll());
    }
    bbInstance.getSharedMap().put(key, allRowsQueue);
  } finally {
    // Always release the shared lock, even if the transfer above fails.
    bbInstance.getSharedLock().unlock();
  }
  return tableRowsQueue;
}
 
Example 2
Source File: Solution.java    From codekata with MIT License
public List<Double> averageOfLevels(TreeNode root) {
    ArrayList<Double> result = new ArrayList<>();
    Queue<TreeNode> queue = new LinkedList<>();
    queue.add(root);
    while (!queue.isEmpty()) {
        int size = queue.size();
        double sum = 0;
        for (int i = 0; i < size; i++) {
            TreeNode tmp = queue.poll();
            sum += (double) tmp.val;
            if (tmp.left != null) queue.add(tmp.left);
            if (tmp.right != null) queue.add(tmp.right);
        }
        result.add(sum / size);
    }
    return result;
}
 
Example 3
Source File: LeetCode226.java    From Project with Apache License 2.0
public TreeNode invertTree2(TreeNode root) {
    if (root == null) {
        return null;
    }
    // Use a queue to hold the nodes whose left and right children have not yet been swapped
    Queue<TreeNode> queue = new LinkedList<>();
    queue.add(root);

    while (!queue.isEmpty()) {
        // Take the next node from the queue and swap its left and right children
        TreeNode currentNode = queue.poll();
        TreeNode tempNode = currentNode.left;
        currentNode.left = currentNode.right;
        currentNode.right = tempNode;

        // If the current node's children are not null, add them to the queue
        if (currentNode.left != null) {
            queue.add(currentNode.left);
        }
        if (currentNode.right != null) {
            queue.add(currentNode.right);
        }
    }
    return root;
}
 
Example 4
Source File: SafeCallManagerImpl.java    From openhab-core with Eclipse Public License 2.0
@Override
public void enqueue(Invocation invocation) {
    synchronized (queues) {
        Queue<Invocation> queue = queues.get(invocation.getIdentifier());
        if (queue == null) {
            queue = new LinkedList<>();
            queues.put(invocation.getIdentifier(), queue);
        }
        queue.add(invocation);
    }
    trigger(invocation.getIdentifier());
}
 
Example 5
Source File: CoBroadcastWithKeyedOperatorTest.java    From flink with Apache License 2.0
@Test
public void testSideOutput() throws Exception {
	try (
			TwoInputStreamOperatorTestHarness<String, Integer, String> testHarness = getInitializedTestHarness(
					BasicTypeInfo.STRING_TYPE_INFO,
					new IdentityKeySelector<>(),
					new FunctionWithSideOutput())
	) {

		testHarness.processWatermark1(new Watermark(10L));
		testHarness.processWatermark2(new Watermark(10L));
		testHarness.processElement2(new StreamRecord<>(5, 12L));

		testHarness.processWatermark1(new Watermark(40L));
		testHarness.processWatermark2(new Watermark(40L));
		testHarness.processElement1(new StreamRecord<>("6", 13L));
		testHarness.processElement1(new StreamRecord<>("6", 15L));

		testHarness.processWatermark1(new Watermark(50L));
		testHarness.processWatermark2(new Watermark(50L));

		Queue<StreamRecord<String>> expectedBr = new ConcurrentLinkedQueue<>();
		expectedBr.add(new StreamRecord<>("BR:5 WM:10 TS:12", 12L));

		Queue<StreamRecord<String>> expectedNonBr = new ConcurrentLinkedQueue<>();
		expectedNonBr.add(new StreamRecord<>("NON-BR:6 WM:40 TS:13", 13L));
		expectedNonBr.add(new StreamRecord<>("NON-BR:6 WM:40 TS:15", 15L));

		TestHarnessUtil.assertOutputEquals(
				"Wrong Side Output",
				expectedBr,
				testHarness.getSideOutput(FunctionWithSideOutput.BROADCAST_TAG));

		TestHarnessUtil.assertOutputEquals(
				"Wrong Side Output",
				expectedNonBr,
				testHarness.getSideOutput(FunctionWithSideOutput.NON_BROADCAST_TAG));
	}
}
 
Example 6
Source File: ClassHierarchy.java    From tascalate-javaflow with Apache License 2.0
int flattenHierarchy(Queue<TypeInfo> superclasses, 
                     SortedSet<InterfaceEntry> interfaces,
                     Set<String> ivisited,
                     int depth) throws IOException {
    
    int strength = initialStrength();
    if (!isInterface) {
        superclasses.add(this);
    }
    // Process superclass
    TypeInfo stype = superClass();
    if (null != stype) {
        stype.flattenHierarchy(superclasses, interfaces, ivisited, depth + 1);                 
    }
    // Process interfaces
    TypeInfo[] itypes = interfaces();
    int size = itypes.length;
    for (int i = size - 1; i >= 0; i--) {
        TypeInfo itype = itypes[i];
        // From bottom to top, so append children
        strength += itype.flattenHierarchy(null, interfaces, ivisited, depth + 1);
    }
    
    if (isInterface) {
        if (!ivisited.contains(name)) {
            // skip if re-implemented on higher level
            // and first appears on lower (base) level
            interfaces.add(new InterfaceEntry(this, strength, depth));
            ivisited.add(name);
        }
        return strength;
    } else {
        return 0;
    }
}
 
Example 7
Source File: TreeMapReducer.java    From beakerx with Apache License 2.0
private void createNextTreeLayer(TreeLayer treeLayer, Queue<TreeLayer> treeLayers) {
  TreeLayer newTreeLayer = createTreeLayer(treeLayer.getNodeLayers());
  if (!newTreeLayer.getNodeLayers().isEmpty()) {
    treeLayers.add(newTreeLayer);
    createNextTreeLayer(newTreeLayer, treeLayers);
  }
}
 
Example 8
Source File: ContinuousFileProcessingRescalingTest.java    From Flink-CEPplus with Apache License 2.0
private void putElementsInQ(Queue<Object> res, Queue<Object> partial) {
	for (Object o : partial) {
		if (o instanceof Watermark) {
			continue;
		}
		res.add(o);
	}
}
 
Example 9
Source File: SearchBuilder.java    From gadtry with Apache License 2.0
/**
 * Breadth-first search
 */
private static <E, R> void searchByBreadthFirst(
        Deque<Route<E, R>> routes,
        SearchContext<E, R> context,
        Route<E, R> beginNode)
{
    final Queue<Route<E, R>> nextNodes = new LinkedList<>();
    nextNodes.add(beginNode);

    Route<E, R> route;
    while ((route = nextNodes.poll()) != null) {
        for (Edge<E, R> edge : route.getLastNode().nextNodes()) {   //use stream.parallel();
            Route<E, R> newRoute = route.copy().add(edge).create();
            context.setLastRoute(newRoute);

            if (context.getNextRule().apply(newRoute)) {
                routes.add(newRoute);
                nextNodes.add(newRoute);
            }

            if (!context.getGlobalRule().apply(context)) {
                nextNodes.clear();
                return;
            }
        }
    }
}
 
Example 10
Source File: ResetFlowEventChainFactory.java    From cloudbreak with Apache License 2.0
@Override
public Queue<Selectable> createFlowTriggerEventQueue(StackEvent event) {
    Queue<Selectable> flowEventChain = new ConcurrentLinkedQueue<>();
    flowEventChain.add(new StackEvent(CLUSTER_RESET_EVENT.event(), event.getResourceId(), event.accepted()));
    flowEventChain.add(new StartClusterSuccess(CLUSTER_INSTALL_EVENT.event(), event.getResourceId()));
    return flowEventChain;
}
 
Example 11
Source File: KafkaOpenMetadataEventConsumer.java    From egeria with Apache License 2.0
private void addUnprocessedEvent(int partition, String topic, KafkaIncomingEvent event) {
    if (isAutoCommitEnabled) {
        return;
    }
    TopicPartition key = new TopicPartition(topic, partition);
    Queue<KafkaIncomingEvent> queue;
    // Look up and, if needed, create the per-partition queue under the same lock,
    // so that two threads cannot race to install different queues for one key.
    synchronized (unprocessedEventQueues) {
        queue = unprocessedEventQueues.get(key);
        if (queue == null) {
            queue = new SynchronizedQueue<>(new ArrayDeque<KafkaIncomingEvent>());
            unprocessedEventQueues.put(key, queue);
        }
    }
    queue.add(event);
}
 
Example 12
Source File: ConcurrentMessageDigest.java    From Tomcat7.0.67 with Apache License 2.0
/**
 * Ensures that {@link #digest(String, byte[][])} will support the specified
 * algorithm. This method <b>must</b> be called and return successfully
 * before using {@link #digest(String, byte[][])}.
 *
 * @param algorithm The message digest algorithm to be supported
 *
 * @throws NoSuchAlgorithmException If the algorithm is not supported by the
 *                                  JVM
 */
public static void init(String algorithm) throws NoSuchAlgorithmException {
    synchronized (queues) {
        if (!queues.containsKey(algorithm)) {
            MessageDigest md = MessageDigest.getInstance(algorithm);
            Queue<MessageDigest> queue =
                    new ConcurrentLinkedQueue<MessageDigest>();
            queue.add(md);
            queues.put(algorithm, queue);
        }
    }
}
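A minimal usage sketch for the method above (not taken from the Tomcat sources; the digest(String, byte[]...) signature is assumed from the Javadoc reference, and StandardCharsets comes from java.nio.charset):

// Hypothetical usage; assumes ConcurrentMessageDigest.digest(String, byte[]...) as referenced above.
byte[] payload = "some payload".getBytes(StandardCharsets.UTF_8);
ConcurrentMessageDigest.init("SHA-256");                          // must complete before any digest() call
byte[] hash = ConcurrentMessageDigest.digest("SHA-256", payload); // reuses a pooled MessageDigest instance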
 
Example 13
Source File: AbstractNodeTree.java    From SoloPi with Apache License 2.0
private MIterator(AbstractNodeTree root) {
    mQueue = new LinkedList<>();

    Queue<AbstractNodeTree> tmpQueue = new LinkedList<>();
    tmpQueue.add(root);
    AbstractNodeTree current = null;

    while ((current = tmpQueue.poll()) != null) {
        mQueue.add(current);
        if (current.getChildrenNodes() != null && current.getChildrenNodes().size() > 0) {
            tmpQueue.addAll(current.getChildrenNodes());
        }
    }
}
 
Example 14
Source File: DaxPathFinder.java    From Runescape-Web-Walker-Engine with Apache License 2.0
public static Destination[][] getMap() {
    final RSTile home = Player.getPosition().toLocalTile();
    Destination[][] map = new Destination[104][104];
    int[][] collisionData = PathFinding.getCollisionData();
    if(collisionData == null || collisionData.length < home.getX() || collisionData[home.getX()].length < home.getY()){
        return map;
    }

    Queue<Destination> queue = new LinkedList<>();
    queue.add(new Destination(home, null, 0));
    map[home.getX()][home.getY()] = queue.peek();

    while (!queue.isEmpty()) {
        Destination currentLocal = queue.poll();

        int x = currentLocal.getLocalTile().getX(), y = currentLocal.getLocalTile().getY();
        Destination destination = map[x][y];

        for (Reachable.Direction direction : Reachable.Direction.values()) {
            if (!direction.isValidDirection(x, y, collisionData)) {
                continue; //Cannot traverse to tile from current.
            }

            RSTile neighbor = direction.getPointingTile(currentLocal.getLocalTile());
            int destinationX = neighbor.getX(), destinationY = neighbor.getY();

            if (!AStarNode.isWalkable(collisionData[destinationX][destinationY])) {
                continue;
            }

            if (map[destinationX][destinationY] != null) {
                continue; //Traversed already
            }

            map[destinationX][destinationY] = new Destination(neighbor, currentLocal, destination.getDistance() + 1);
            queue.add(map[destinationX][destinationY]);
        }

    }
    return map;
}
 
Example 15
Source File: RouterServerSSLTest.java    From ambry with Apache License 2.0
/**
 * Test that the non blocking router can handle a large number of concurrent (small blob) operations without errors.
 * This test creates chains of operations without waiting for previous operations to finish.
 * @throws Exception
 */
@Test
public void interleavedOperationsTest() throws Exception {
  List<OperationChain> opChains = new ArrayList<>();
  Random random = new Random();
  for (int i = 0; i < 20; i++) {
    Queue<OperationType> operations = new LinkedList<>();
    switch (i % 3) {
      case 0:
        operations.add(OperationType.PUT);
        operations.add(OperationType.AWAIT_CREATION);
        operations.add(OperationType.GET_AUTHORIZATION_FAILURE);
        operations.add(OperationType.GET);
        operations.add(OperationType.GET_INFO);
        operations.add(OperationType.TTL_UPDATE);
        operations.add(OperationType.AWAIT_TTL_UPDATE);
        operations.add(OperationType.GET);
        operations.add(OperationType.GET_INFO);
        operations.add(OperationType.DELETE);
        operations.add(OperationType.AWAIT_DELETION);
        operations.add(OperationType.GET_DELETED);
        operations.add(OperationType.GET_INFO_DELETED);
        operations.add(OperationType.GET_DELETED_SUCCESS);
        operations.add(OperationType.GET_INFO_DELETED_SUCCESS);
        operations.add(OperationType.UNDELETE);
        operations.add(OperationType.AWAIT_UNDELETE);
        operations.add(OperationType.GET);
        operations.add(OperationType.GET_INFO);
        break;
      case 1:
        operations.add(OperationType.PUT);
        operations.add(OperationType.AWAIT_CREATION);
        operations.add(OperationType.DELETE_AUTHORIZATION_FAILURE);
        operations.add(OperationType.DELETE);
        operations.add(OperationType.AWAIT_DELETION);
        operations.add(OperationType.GET_DELETED);
        operations.add(OperationType.GET_INFO_DELETED);
        operations.add(OperationType.GET_DELETED);
        operations.add(OperationType.GET_INFO_DELETED);
        operations.add(OperationType.GET_DELETED_SUCCESS);
        operations.add(OperationType.GET_INFO_DELETED_SUCCESS);
        break;
      case 2:
        operations.add(OperationType.PUT);
        operations.add(OperationType.AWAIT_CREATION);
        operations.add(OperationType.GET);
        operations.add(OperationType.GET);
        operations.add(OperationType.GET_AUTHORIZATION_FAILURE);
        operations.add(OperationType.GET);
        operations.add(OperationType.GET_INFO);
        operations.add(OperationType.TTL_UPDATE);
        operations.add(OperationType.AWAIT_TTL_UPDATE);
        operations.add(OperationType.GET);
        operations.add(OperationType.GET_INFO);
        break;
    }
    int blobSize = random.nextInt(100 * 1024);
    opChains.add(testFramework.startOperationChain(blobSize, null, i, operations));
  }
  testFramework.checkOperationChains(opChains);
}
 
Example 16
Source File: JavaCompiler.java    From openjdk-jdk8u with GNU General Public License v2.0
/**
 * Perform dataflow checks on an attributed parse tree.
 */
protected void flow(Env<AttrContext> env, Queue<Env<AttrContext>> results) {
    if (compileStates.isDone(env, CompileState.FLOW)) {
        results.add(env);
        return;
    }

    try {
        if (shouldStop(CompileState.FLOW))
            return;

        if (relax) {
            results.add(env);
            return;
        }

        if (verboseCompilePolicy)
            printNote("[flow " + env.enclClass.sym + "]");
        JavaFileObject prev = log.useSource(
                                            env.enclClass.sym.sourcefile != null ?
                                            env.enclClass.sym.sourcefile :
                                            env.toplevel.sourcefile);
        try {
            make.at(Position.FIRSTPOS);
            TreeMaker localMake = make.forToplevel(env.toplevel);
            flow.analyzeTree(env, localMake);
            compileStates.put(env, CompileState.FLOW);

            if (shouldStop(CompileState.FLOW))
                return;

            results.add(env);
        }
        finally {
            log.useSource(prev);
        }
    }
    finally {
        if (!taskListener.isEmpty()) {
            TaskEvent e = new TaskEvent(TaskEvent.Kind.ANALYZE, env.toplevel, env.enclClass.sym);
            taskListener.finished(e);
        }
    }
}
 
Example 17
Source File: WordLadder2.java    From pengyifan-leetcode with BSD 3-Clause "New" or "Revised" License
public List<List<String>> findLadders(String start, String end,
    Set<String> dict) {
  if (start == null || end == null) {
    return Collections.emptyList();
  }

  dict.add(start);
  dict.add(end);
  Map<String, Set<String>> neighbours = createNeighbours(dict);

  List<List<String>> solutions = new ArrayList<List<String>>();

  // BFS search queue
  Queue<Node> queue = new LinkedList<Node>();
  queue.offer(new Node(null, start, 1));

  // BFS level
  int previousLevel = 0;
  // mark which nodes have been visited, to break infinite loop
  Map<String, Integer> visited = new HashMap<String, Integer>();
  while (!queue.isEmpty()) {
    Node n = queue.poll();
    if (end.equals(n.str)) {
      // Found one path; if it is the first one or has the same length as the
      // previous path it is valid, otherwise all possible shortest paths have
      // already been found and we can stop.
      if (previousLevel == 0 || n.level == previousLevel) {
        previousLevel = n.level;
        findPath(n, solutions);
      } else {
        // all path with length *previousLevel* have been found
        break;
      }
    } else {
      Set<String> neighbour = neighbours.get(n.str);
      if (neighbour == null || neighbour.isEmpty()) {
        continue;
      }
      Set<String> toRemove = new HashSet<String>();
      for (String word : neighbour) {
        // If word was visited before at a smaller level, there is already a
        // shorter path from start to word, so ignore it to avoid an infinite
        // loop; if it was visited on the same level, we still need to put it
        // into the queue.
        if (visited.containsKey(word)) {
          if (n.level + 1 > visited.get(word)) {
            neighbours.get(word).remove(n.str);
            toRemove.add(word);
            continue;
          }
        }
        visited.put(word, n.level + 1);
        queue.add(new Node(n, word, n.level + 1));
        neighbours.get(word).remove(n.str);
      }
      neighbour.removeAll(toRemove);
    }
  }

  return solutions;
}
 
Example 18
Source File: CurriculaRequestsCourseDemands.java    From unitime with Apache License 2.0
protected void computeTargetShare(CurriculumClassification clasf, Collection<CurriculumCourse> courses, CurriculumCourseGroupsProvider course2groups, int nrStudents, double factor, double w, CurModel model) {
	for (CurriculumCourse c1: courses) {
		double x1 = model.getCourse(c1.getCourse().getUniqueId()).getOriginalMaxSize();
		Set<CurriculumCourse>[] group = new HashSet[] { new HashSet<CurriculumCourse>(), new HashSet<CurriculumCourse>()};
		Queue<CurriculumCourse> queue = new LinkedList<CurriculumCourse>();
		queue.add(c1);
		Set<CurriculumCourseGroup> done = new HashSet<CurriculumCourseGroup>();
		while (!queue.isEmpty()) {
			CurriculumCourse c = queue.poll();
			for (CurriculumCourseGroup g: course2groups.getGroups(c))
				if (done.add(g))
					for (CurriculumCourse x: courses)
						if (!x.equals(c) && !x.equals(c1) && course2groups.getGroups(x).contains(g) && group[group[0].contains(c) ? 0 : g.getType()].add(x))
							queue.add(x);
		}
		for (CurriculumCourse c2: courses) {
			double x2 = model.getCourse(c2.getCourse().getUniqueId()).getOriginalMaxSize();
			boolean opt = group[0].contains(c2);
			boolean req = !opt && group[1].contains(c2);
			double defaultShare = (opt ? 0.0 : req ? Math.min(x1, x2) : c1.getPercShare() * c2.getPercShare() * nrStudents);
			if (c1.getUniqueId() >= c2.getUniqueId()) continue;
			double share = defaultShare;
			Set<WeightedStudentId> s1 = iStudentCourseRequests.getDemands(c1.getCourse());
			Set<WeightedStudentId> s2 = iStudentCourseRequests.getDemands(c2.getCourse());
			int sharedStudents = 0, registered = 0;
			if (s1 != null && !s1.isEmpty() && s2 != null && !s2.isEmpty()) {
				for (WeightedStudentId s: s1) {
					if (s.match(clasf)) {
						registered ++;
						if (s2.contains(s)) sharedStudents ++;
					}
				}
			}
			if (registered == 0) {
				share = (1.0 - w) * defaultShare;
			} else {
				share = w * (x1 / registered) * sharedStudents + (1.0 - w) * defaultShare;
			}
			model.setTargetShare(c1.getCourse().getUniqueId(), c2.getCourse().getUniqueId(), share, false);
		}
	}
}
 
Example 19
Source File: PersistentTreeMap.java    From Paguro with Apache License 2.0
private void writeObject(ObjectOutputStream s) throws IOException {
            s.defaultWriteObject();

            // Serializing in iteration-order yields a worst-case deserialization because
            // without re-balancing (rotating nodes) such an order yields a completely unbalanced
            // linked list internal structure.
            //       4
            //      /
            //     3
            //    /
            //   2
            //  /
            // 1
            //
            // That seems unnecessary since before Serialization we might have something like this
            // which, while not perfect, requires no re-balancing:
            //
            //                    11
            //            ,------'  `----.
            //           8                14
            //        ,-' `-.            /  \
            //       4       9         13    15
            //    ,-' `-.     \       /        \
            //   2       6     10   12          16
            //  / \     / \
            // 1   3   5   7
            //
            // If we serialize the middle value (n/2) first.  Then the n/4 and 3n/4,
            // followed by n/8, 3n/8, 5n/8, 7n/8, then n/16, 3n/16, etc.  Finally, the odd-numbered
            // values last.  That gives us the order:
            // 8, 4, 12, 2, 6, 10, 14, 1, 3, 5, 7, 9, 11, 13, 15
            //
            // Deserializing in that order yields an ideally balanced tree without any shuffling:
            //               8
            //        ,-----' `-------.
            //       4                 12
            //    ,-' `-.          ,--'  `--.
            //   2       6       10          14
            //  / \     / \     /  \        /  \
            // 1   3   5   7   9    11    13    15
            //
            // That would be ideal, but I don't see how to do that without a significant
            // intermediate data structure.
            //
            // A good improvement could be made by serializing breadth-first instead of depth first
            // to at least yield a tree no worse than the original without requiring shuffling.
            //
            // This improvement does not change the serialized form, or break compatibility.
            // But it has a superior ordering for deserialization without (or with minimal)
            // rotations.

//            System.out.println("Serializing tree map...");
            if (theMap.tree != null) {
                Queue<Node<K,V>> queue = new ArrayDeque<>();
                queue.add(theMap.tree);
                while (queue.peek() != null) {
                    Node<K,V> node = queue.remove();
//                    System.out.println("Node: " + node);
                    s.writeObject(node.getKey());
                    s.writeObject(node.getValue());
                    Node<K,V> child = node.left();
                    if (child != null) {
                        queue.add(child);
                    }
                    child = node.right();
                    if (child != null) {
                        queue.add(child);
                    }
                }
            }
//            for (UnEntry<K,V> entry : theMap) {
//                s.writeObject(entry.getKey());
//                s.writeObject(entry.getValue());
//            }
        }
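The comment above describes an ordering (the middle element first, then the quarter points, and so on) that would deserialize into an ideally balanced tree without any rotations. That order can be produced from an already-sorted list with a queue of index ranges, although flattening the tree into such a list first is exactly the intermediate structure the comment wants to avoid. The helper below is a hypothetical sketch, not part of Paguro, and for 15 sorted elements it emits exactly the sequence 8, 4, 12, 2, 6, 10, 14, 1, 3, 5, 7, 9, 11, 13, 15 quoted above.

// Hypothetical helper, not from the Paguro sources; requires java.util.{List, ArrayList, Queue, ArrayDeque}.
static <T> List<T> balancedOrder(List<T> sorted) {
    List<T> out = new ArrayList<>();
    Queue<int[]> ranges = new ArrayDeque<>();        // each entry is an inclusive {lo, hi} index range
    ranges.add(new int[] { 0, sorted.size() - 1 });
    while (!ranges.isEmpty()) {
        int[] r = ranges.poll();
        if (r[0] > r[1]) {
            continue;                                // empty range, nothing to emit
        }
        int mid = (r[0] + r[1]) >>> 1;
        out.add(sorted.get(mid));                    // middle of this range first...
        ranges.add(new int[] { r[0], mid - 1 });     // ...then queue its left half
        ranges.add(new int[] { mid + 1, r[1] });     // ...and its right half
    }
    return out;
}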
 
Example 20
Source File: RouteFinder.java    From tutorials with MIT License
public List<T> findRoute(T from, T to) {
    Map<T, RouteNode<T>> allNodes = new HashMap<>();
    Queue<RouteNode<T>> openSet = new PriorityQueue<>();

    RouteNode<T> start = new RouteNode<>(from, null, 0d, targetScorer.computeCost(from, to));
    allNodes.put(from, start);
    openSet.add(start);

    while (!openSet.isEmpty()) {
        System.out.println("Open Set contains: " + openSet.stream().map(RouteNode::getCurrent).collect(Collectors.toSet()));
        RouteNode<T> next = openSet.poll();
        System.out.println("Looking at node: " + next);
        if (next.getCurrent().equals(to)) {
            System.out.println("Found our destination!");

            List<T> route = new ArrayList<>();
            RouteNode<T> current = next;
            do {
                route.add(0, current.getCurrent());
                current = allNodes.get(current.getPrevious());
            } while (current != null);

            System.out.println("Route: " + route);
            return route;
        }

        graph.getConnections(next.getCurrent()).forEach(connection -> {
            double newScore = next.getRouteScore() + nextNodeScorer.computeCost(next.getCurrent(), connection);
            RouteNode<T> nextNode = allNodes.getOrDefault(connection, new RouteNode<>(connection));
            allNodes.put(connection, nextNode);

            if (nextNode.getRouteScore() > newScore) {
                nextNode.setPrevious(next.getCurrent());
                nextNode.setRouteScore(newScore);
                nextNode.setEstimatedScore(newScore + targetScorer.computeCost(connection, to));
                openSet.add(nextNode);
                System.out.println("Found a better route to node: " + nextNode);
            }
        });
    }

    throw new IllegalStateException("No route found");
}