java.util.ArrayDeque Source Code Examples

The following examples show how java.util.ArrayDeque is used in real projects. You can also follow the links to view the full source on GitHub.
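
Before the project examples, here is a minimal self-contained sketch (not taken from any of the projects below) showing ArrayDeque used both as a LIFO stack and as a FIFO queue:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Queue;

public class ArrayDequeBasics {
    public static void main(String[] args) {
        // As a stack: push/pop/peek all operate on the head of the deque.
        Deque<String> stack = new ArrayDeque<>();
        stack.push("a");
        stack.push("b");
        System.out.println(stack.pop());   // prints "b"

        // As a FIFO queue: add/offer append to the tail, poll removes from the head.
        Queue<Integer> queue = new ArrayDeque<>();
        queue.add(1);
        queue.add(2);
        System.out.println(queue.poll());  // prints 1

        // Note: ArrayDeque rejects null elements and is not thread-safe.
    }
}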

Example 1  Project: galleon  File: FormattingXmlStreamWriter.java
public void writeStartElement(final String localName) throws XMLStreamException {
    ArrayDeque<String> namespaces = unspecifiedNamespaces;
    String namespace = namespaces.getFirst();
    if (namespace != NO_NAMESPACE) {
        writeStartElement(namespace, localName);
        return;
    }

    unspecifiedNamespaces.push(namespace);

    // If this is a nested element flush the outer
    nl();
    indent();
    delegate.writeStartElement(localName);

    level++;
    state = START_ELEMENT;
    indentEndElement = false;
}
 
Example 2  Project: firebase-android-sdk  File: SyncEngine.java
public SyncEngine(
    LocalStore localStore,
    RemoteStore remoteStore,
    User initialUser,
    int maxConcurrentLimboResolutions) {
  this.localStore = localStore;
  this.remoteStore = remoteStore;
  this.maxConcurrentLimboResolutions = maxConcurrentLimboResolutions;

  queryViewsByQuery = new HashMap<>();
  queriesByTarget = new HashMap<>();

  enqueuedLimboResolutions = new ArrayDeque<>();
  activeLimboTargetsByKey = new HashMap<>();
  activeLimboResolutionsByTarget = new HashMap<>();
  limboDocumentRefs = new ReferenceSet();

  mutationUserCallbacks = new HashMap<>();
  targetIdGenerator = TargetIdGenerator.forSyncEngine();
  currentUser = initialUser;

  pendingWritesCallbacks = new HashMap<>();
}
 
Example 3  Project: doov  File: AstHtmlRenderer.java
private void binary_SPACE(Metadata metadata, ArrayDeque<Metadata> parents) {
    final Optional<Metadata> pmd = parents.stream().skip(1).findFirst();
    final Operator pmdOperator = pmd.map(Metadata::getOperator).orElse(null);
    final boolean leftChild = pmd.map(m -> m.childAt(0) == metadata).orElse(false);
    if ((!leftChild || pmdOperator != or || metadata.getOperator() != and)) {
        // @see io.doov.core.dsl.meta.ast.HtmlAndTest.and_or_and()
        writer.writeExclusionBar(metadata, parents);
    }
    toHtml(metadata.childAt(0), parents);
    writer.write(SPACE);
    writer.writeBeginSpan(CSS_OPERATOR);
    writer.writeFromBundle(metadata.getOperator());
    writer.writeEndSpan();
    writer.write(SPACE);
    toHtml(metadata.childAt(1), parents);
}
 
/**
 * Sliding window maximum, solved with a double-ended queue (deque) of indices.
 *
 * @param nums the input array
 * @param k    the window size
 * @return the maximum of every window of size k
 */
public int[] maxSlidingWindow(int[] nums, int k) {
    int len = nums.length;
    if (len == 0) {
        return new int[0];
    }
    int[] res = new int[len - k + 1];
    Deque<Integer> queue = new ArrayDeque<>();
    for (int i = 0; i < len; i++) {
        if (i >= k && queue.getFirst() == i - k) {
            queue.removeFirst();
        }
        while (!queue.isEmpty() && nums[queue.getLast()] <= nums[i]) {
            queue.removeLast();
        }
        queue.add(i);

        if (i >= k - 1) {
            res[i - k + 1] = nums[queue.getFirst()];
        }
    }
    return res;
}
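
A quick usage sketch for the method above (the input values are chosen purely for illustration):

int[] windowMax = maxSlidingWindow(new int[]{1, 3, -1, -3, 5, 3, 6, 7}, 3);
// windowMax is [3, 3, 5, 5, 6, 7]: the deque holds indices of a decreasing run of
// values, so the maximum of the current window is always at its front.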
 
Example 5  Project: patchwork-api  File: BiomeDictionary.java
private static Collection<Type> listSupertypes(Type... types) {
	Set<Type> supertypes = new HashSet<>();
	Deque<Type> next = new ArrayDeque<>();
	Collections.addAll(next, types);

	while (!next.isEmpty()) {
		Type type = next.remove();

		for (Type sType : Type.BY_NAME.values()) {
			if (sType.subTypes.contains(type) && supertypes.add(sType)) {
				next.add(sType);
			}
		}
	}

	return supertypes;
}
 
Example 6  Project: yangtools  File: SchemaContextUtil.java
private static @Nullable SchemaNode findTargetNode(final SchemaContext context, final QNameModule localNamespace,
        final YangLocationPath path) {
    final Deque<QName> ret = new ArrayDeque<>();
    for (Step step : path.getSteps()) {
        if (step instanceof AxisStep) {
            // We only support parent axis steps
            final YangXPathAxis axis = ((AxisStep) step).getAxis();
            checkState(axis == YangXPathAxis.PARENT, "Unexpected axis %s", axis);
            ret.removeLast();
            continue;
        }

        // This has to be a QNameStep
        checkState(step instanceof QNameStep, "Unhandled step %s in %s", step, path);
        ret.addLast(resolve(((QNameStep) step).getQName(), localNamespace));
    }

    return findTargetNode(context, ret);
}
 
Example 7  Project: Logistics-Pipes-2  File: LPRoutedItem.java
public static LPRoutedItem readFromNBT(NBTTagCompound compound, TileGenericPipe holder) {
	double x = compound.getDouble("posX");
	double y = compound.getDouble("posY");
	double z = compound.getDouble("posZ");
	UUID id = compound.getUniqueId("UID");
	ItemStack content = new ItemStack(compound.getCompoundTag("inventory"));
	int ticks = compound.getInteger("ticks");
	Deque<EnumFacing> routingInfo = new ArrayDeque<>();
	NBTTagList routeList = (NBTTagList) compound.getTag("route");
	for(Iterator<NBTBase> i = routeList.iterator(); i.hasNext();) {
		NBTTagCompound node = (NBTTagCompound) i.next();
		EnumFacing nodeTuple = EnumFacing.values()[node.getInteger("heading")];
		routingInfo.add(nodeTuple);
	}
	LPRoutedItem item = new LPRoutedItem(x, y, z, content, ticks, id);
	item.setHeading(EnumFacing.VALUES[compound.getInteger("heading")]);
	item.setHolding(holder);
	item.route = routingInfo;
	return item;
}
 
Example 8  Project: Alink  File: BisectingKMeansModelMapper.java
private void assignClusterId() {
    Queue<TreeNode> queue = new ArrayDeque<>();
    queue.add(root);
    long id = 0L;
    treeNodeIds = new ArrayList<>();

    while (!queue.isEmpty()) {
        TreeNode top = queue.poll();
        if (top.isLeaf()) {
            top.clusterId = id;
            treeNodeIds.add(top.treeNodeId);
            id++;
        } else {
            if (top.leftChild != null) {
                queue.add(top.leftChild);
            }
            if (top.rightChild != null) {
                queue.add(top.rightChild);
            }
        }
    }
}
 
/**
 * Test NegativeAcknowledger bulk action with a large batch
 */
@Test
public void testNackBulkActionLargeBatch() throws JMSException {

    /*
     * Set up the message queue
     */
    ArrayDeque<SQSMessageConsumerPrefetch.MessageManager> messageQueue = addSQSMessageToQueue(13);

    /*
     * Nack the messages in bulk actions
     */
    negativeAcknowledger.bulkAction(messageQueue, QUEUE_URL);

    /*
     * Verify results
     */
    verify(negativeAcknowledger, times(2)).action(eq(QUEUE_URL), anyList());
}
 
Example 10  Project: enmasse  File: BlockingClient.java
public void send(String address, List<Message> messages, long timeout, TimeUnit timeUnit) throws InterruptedException {
    ProtonClient client = ProtonClient.create(vertx);
    CountDownLatch latch = new CountDownLatch(1);
    Queue<Message> messageQueue = new ArrayDeque<>(messages);
    client.connect(host, port, connectEvent -> {
        if (connectEvent.succeeded()) {
            ProtonConnection connection = connectEvent.result();
            connection.open();

            ProtonSender sender = connection.createSender(address);
            sender.openHandler(senderOpenEvent -> {
                if (senderOpenEvent.succeeded()) {
                    sendNext(connection, sender, messageQueue, latch);
                }
            });
            sender.open();
        }
    });
    boolean ok = latch.await(timeout, timeUnit);
    if (!ok) {
        throw new RuntimeException("Sending messages timed out, " + messageQueue.size() + " messages unsent");
    }
}
 
Example 11  Project: BIMserver  File: QueryObjectProvider.java
public QueryObjectProvider(DatabaseSession databaseSession, BimServer bimServer, Query query, Set<Long> roids, PackageMetaData packageMetaData) throws IOException, QueryException {
	this.databaseSession = databaseSession;
	this.bimServer = bimServer;
	this.query = query;
	this.roids = roids;
	this.packageMetaData = packageMetaData;
	
	stack = new ArrayDeque<StackFrame>();
	stack.push(new StartFrame(this, roids));
	
	for (QueryPart queryPart : query.getQueryParts()) {
		if (queryPart.hasOids()) {
			goingToRead.addAll(queryPart.getOids());
		}
	}
}
 
Example 12  Project: Flink-CEPplus  File: RecordWriterTest.java
/**
 * Tests that broadcasted events' buffers are independent (in their (reader) indices) once they
 * are put into the queue for Netty when broadcasting events to multiple channels.
 */
@Test
public void testBroadcastEventBufferIndependence() throws Exception {
	@SuppressWarnings("unchecked")
	ArrayDeque<BufferConsumer>[] queues =
		new ArrayDeque[]{new ArrayDeque(), new ArrayDeque()};

	ResultPartitionWriter partition =
		new CollectingPartitionWriter(queues, new TestPooledBufferProvider(Integer.MAX_VALUE));
	RecordWriter<?> writer = new RecordWriter<>(partition);

	writer.broadcastEvent(EndOfPartitionEvent.INSTANCE);

	// Verify added to all queues
	assertEquals(1, queues[0].size());
	assertEquals(1, queues[1].size());

	// these two buffers may share the memory but not the indices!
	Buffer buffer1 = buildSingleBuffer(queues[0].remove());
	Buffer buffer2 = buildSingleBuffer(queues[1].remove());
	assertEquals(0, buffer1.getReaderIndex());
	assertEquals(0, buffer2.getReaderIndex());
	buffer1.setReaderIndex(1);
	assertEquals("Buffer 2 shares the same reader index as buffer 1", 0, buffer2.getReaderIndex());
}
 
Example 13  Project: Cubes  File: SunLight.java
private static void propagateRemove(ArrayDeque<LightNode> removeQueue, ArrayDeque<LightNode> addQueue, LightWorldSection w) {
  if (removeQueue.isEmpty()) return;

  while (!removeQueue.isEmpty()) {
    LightNode n = removeQueue.pop();
    int x = n.x;
    int y = n.y;
    int z = n.z;
    int l = n.l;

    if (l <= 1) continue;

    tryPropagateRemove(removeQueue, addQueue, w, x - 1, y, z, l);
    tryPropagateRemove(removeQueue, addQueue, w, x + 1, y, z, l);
    tryPropagateRemove(removeQueue, addQueue, w, x, y, z - 1, l);
    tryPropagateRemove(removeQueue, addQueue, w, x, y, z + 1, l);
    if (y > 0)
      tryPropagateRemove(removeQueue, addQueue, w, x, y - 1, z, 16); //16 is higher than maximum light, therefore the sunlight is always removed
    tryPropagateRemove(removeQueue, addQueue, w, x, y + 1, z, l);
  }
}
 
Example 14  Project: onos  File: VplsOperationManagerTest.java
/**
 * Optimize operations where the first operation is a REMOVE and the last is an ADD.
 */
@Test
public void testOptimizeOperationsRToA() {
    Deque<VplsOperation> operations = new ArrayDeque<>();
    VplsData vplsData = VplsData.of(VPLS1);
    vplsData.addInterfaces(ImmutableSet.of(V100H1));
    VplsOperation vplsOperation = VplsOperation.of(vplsData,
                                                   VplsOperation.Operation.REMOVE);
    operations.add(vplsOperation);
    vplsData = VplsData.of(VPLS1, EncapsulationType.VLAN);
    vplsData.addInterfaces(ImmutableSet.of(V100H1, V100H2));
    vplsOperation = VplsOperation.of(vplsData,
                                     VplsOperation.Operation.ADD);
    operations.add(vplsOperation);
    vplsOperation = VplsOperationManager.getOptimizedVplsOperation(operations);
    assertEquals(VplsOperation.of(vplsData, VplsOperation.Operation.UPDATE), vplsOperation);
}
 
Example 15  Project: DDMQ  File: UpstreamJobBuffer.java
public synchronized void clearTerminatedJobs() {
    Iterator<Map.Entry<Long, UpstreamJob>> itr = workingJobs.entrySet().iterator();
    while (itr.hasNext()) {
        UpstreamJob job = itr.next().getValue();
        if (job.isTerminated()) {
            job.terminate();
            itr.remove();
        }
    }
    Queue<UpstreamJob> oldQueue = queue;
    queue = new ArrayDeque<>();
    oldQueue.forEach(job -> {
        if (!job.isTerminated()) {
            queue.add(job);
        }
    });
}
 
Example 16  Project: grakn  File: ChuLiuEdmonds.java
public static PartialSolution initialize(WeightedGraph graph) {
    final Partition<Node> stronglyConnected = Partition.singletons(graph.getNodes());
    final HashMap<Node, Weighted<DirectedEdge>> incomingByScc = new HashMap<>();
    final Deque<ExclusiveEdge> exclusiveEdges = new ArrayDeque<>();
    // group edges by their destination component
    final EdgeQueueMap incomingEdges = new EdgeQueueMap(stronglyConnected);
    for (Node destinationNode : graph.getNodes()) {
        for (Weighted<DirectedEdge> inEdge : graph.getIncomingEdges(destinationNode)) {
            if (inEdge.weight != Double.NEGATIVE_INFINITY) {
                incomingEdges.addEdge(inEdge);
            }
        }
    }
    return new PartialSolution(
            stronglyConnected,
            Partition.singletons(graph.getNodes()),
            incomingByScc,
            exclusiveEdges,
            incomingEdges,
            0.0
    );
}
 
/**
 * Helper method for import declarations, names, and qualified names.
 */
private void visitName(Tree node) {
    Deque<Name> stack = new ArrayDeque<>();
    for (; node instanceof MemberSelectTree; node = ((MemberSelectTree) node).getExpression()) {
        stack.addFirst(((MemberSelectTree) node).getIdentifier());
    }
    stack.addFirst(((IdentifierTree) node).getName());
    boolean first = true;
    for (Name name : stack) {
        if (!first) {
            token(".");
        }
        token(name.toString());
        first = false;
    }
}
 
Example 18  Project: lucene-solr  File: Operations.java
/** Returns bitset marking states reachable from the initial state. */
private static BitSet getLiveStatesFromInitial(Automaton a) {
  int numStates = a.getNumStates();
  BitSet live = new BitSet(numStates);
  if (numStates == 0) {
    return live;
  }
  ArrayDeque<Integer> workList = new ArrayDeque<>();
  live.set(0);
  workList.add(0);

  Transition t = new Transition();
  while (workList.isEmpty() == false) {
    int s = workList.removeFirst();
    int count = a.initTransition(s, t);
    for(int i=0;i<count;i++) {
      a.getNextTransition(t);
      if (live.get(t.dest) == false) {
        live.set(t.dest);
        workList.add(t.dest);
      }
    }
  }

  return live;
}
 
/**
 * Stack.
 * Iterate through the nodes of the preorder string.
 * If the current node is a number, just push it onto the stack.
 * If it is a '#', distinguish two cases:
 * 1) If the top of the stack is a number, this '#' is a left child: just push it.
 * 2) If the top of the stack is a '#', this '#' is a right child, so the completed subtree
 *    (that '#' plus the number beneath it) is popped.
 * 2.1) After popping, if the new top is again '#', the enclosing subtree is complete as well
 *      and is popped the same way, hence the loop.
 * 2.2) Once the top is a number, a '#' is pushed in place of the popped subtree so that the
 *      next node can tell it is a right child.
 * https://discuss.leetcode.com/topic/35973/java-intuitive-22ms-solution-with-stack
 */
public boolean isValidSerialization(String preorder) {
  Deque<String> stack = new ArrayDeque<>();
  String[] nodes = preorder.split(",");
  for (int i = 0; i < nodes.length; i++) {
    String curr = nodes[i];
    while ("#".equals(curr) && !stack.isEmpty() && "#".equals(stack.peek())) {
      stack.pop();
      if (stack.isEmpty()) {
        return false;
      }
      stack.pop();
    }
    stack.push(curr);
  }
  return stack.size() == 1 && "#".equals(stack.peek());
}
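
A few illustrative calls (the inputs are examples, not taken from the original discussion):

isValidSerialization("9,3,4,#,#,1,#,#,2,#,6,#,#"); // true:  every subtree collapses to a single '#'
isValidSerialization("1,#");                       // false: the root never gets a right child
isValidSerialization("9,#,#,1");                   // false: '1' appears after the tree is already complete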
 
Example 20  Project: netbeans  File: NbProjectManager.java
private <E extends Exception> void saveProjects(@NonNull final Class<E> clz) throws E {
    final Queue<Exception> causes = new ArrayDeque<Exception>();
    for (Project prj : projects) {
        try {
            owner.saveProject(prj);
        } catch (IOException ioe) {
            causes.add(ioe);
        }
    }
    if (!causes.isEmpty()) {
        try {
            final E exc = clz.getDeclaredConstructor().newInstance();
            for (Exception cause : causes) {
                exc.addSuppressed(cause);
            }
            throw  exc;
        } catch (ReflectiveOperationException e) {
            throw new IllegalStateException(e);
        }
    }
}
 
@Test
public void testReadFinishedInput() throws Exception {
	try {
		testInputSelection(new TestReadFinishedInputStreamOperatorFactory(), true, new ArrayDeque<>(), true);
		fail("should throw an IOException");
	} catch (Exception t) {
		if (!ExceptionUtils.findThrowableWithMessage(t, "Can not make a progress: all selected inputs are already finished").isPresent()) {
			throw t;
		}
	}
}
 
Example 22  Project: sis  File: TransformingWriter.java
/**
 * Creates a new writer for the given version of the standards.
 */
TransformingWriter(final XMLEventWriter out, final TransformVersion version) {
    super(version);
    this.out = out;
    uniqueNamespaces = new LinkedHashMap<>();
    deferred = new ArrayDeque<>();
}
 
Example 23  Project: Telegram  File: FragmentedMp4Extractor.java
/**
 * @param flags Flags that control the extractor's behavior.
 * @param timestampAdjuster Adjusts sample timestamps. May be null if no adjustment is needed.
 * @param sideloadedTrack Sideloaded track information, in the case that the extractor will not
 *     receive a moov box in the input data. Null if a moov box is expected.
 * @param sideloadedDrmInitData The {@link DrmInitData} to use for encrypted tracks. If null, the
 *     pssh boxes (if present) will be used.
 * @param closedCaptionFormats For tracks that contain SEI messages, the formats of the closed
 *     caption channels to expose.
 * @param additionalEmsgTrackOutput An extra track output that will receive all emsg messages
 *     targeting the player, even if {@link #FLAG_ENABLE_EMSG_TRACK} is not set. Null if special
 *     handling of emsg messages for players is not required.
 */
public FragmentedMp4Extractor(
    @Flags int flags,
    @Nullable TimestampAdjuster timestampAdjuster,
    @Nullable Track sideloadedTrack,
    @Nullable DrmInitData sideloadedDrmInitData,
    List<Format> closedCaptionFormats,
    @Nullable TrackOutput additionalEmsgTrackOutput) {
  this.flags = flags | (sideloadedTrack != null ? FLAG_SIDELOADED : 0);
  this.timestampAdjuster = timestampAdjuster;
  this.sideloadedTrack = sideloadedTrack;
  this.sideloadedDrmInitData = sideloadedDrmInitData;
  this.closedCaptionFormats = Collections.unmodifiableList(closedCaptionFormats);
  this.additionalEmsgTrackOutput = additionalEmsgTrackOutput;
  eventMessageEncoder = new EventMessageEncoder();
  atomHeader = new ParsableByteArray(Atom.LONG_HEADER_SIZE);
  nalStartCode = new ParsableByteArray(NalUnitUtil.NAL_START_CODE);
  nalPrefix = new ParsableByteArray(5);
  nalBuffer = new ParsableByteArray();
  scratchBytes = new byte[16];
  scratch = new ParsableByteArray(scratchBytes);
  containerAtoms = new ArrayDeque<>();
  pendingMetadataSampleInfos = new ArrayDeque<>();
  trackBundles = new SparseArray<>();
  durationUs = C.TIME_UNSET;
  pendingSeekTimeUs = C.TIME_UNSET;
  segmentIndexEarliestPresentationTimeUs = C.TIME_UNSET;
  enterReadingAtomHeaderState();
}
 
Example 24  Project: bazel  File: ProtoResourceUsageAnalyzer.java
private Set<Resource> findReachableResources(List<Resource> roots) {
  final Multimap<Resource, Resource> referenceLog = LinkedHashMultimap.create();
  Deque<Resource> queue = new ArrayDeque<>(roots);
  final Set<Resource> reachable = new LinkedHashSet<>();
  while (!queue.isEmpty()) {
    Resource resource = queue.pop();
    if (resource.references != null) {
      resource.references.forEach(
          r -> {
            referenceLog.put(r, resource);
            // add if it has not been marked reachable, therefore processed.
            if (!reachable.contains(r)) {
              queue.add(r);
            }
          });
    }
    // if we see it, it is reachable.
    reachable.add(resource);
  }

  // dump resource reference map:
  final StringBuilder keptResourceLog = new StringBuilder();
  referenceLog
      .asMap()
      .forEach(
          (resource, referencesTo) ->
              keptResourceLog
                  .append(printResource(resource))
                  .append(" => [")
                  .append(
                      referencesTo.stream()
                          .map(ProtoResourceUsageAnalyzer::printResource)
                          .collect(joining(", ")))
                  .append("]\n"));

  logger.fine("Kept resource references:\n" + keptResourceLog);

  return reachable;
}
 
Example 25  Project: egeria  File: KafkaOpenMetadataEventConsumer.java
private void addUnprocessedEvent(int partition, String topic, KafkaIncomingEvent event) {
    if (isAutoCommitEnabled) {
        return;
    }
    TopicPartition key = new TopicPartition(topic, partition);
    Queue<KafkaIncomingEvent> queue = unprocessedEventQueues.get(key);
    if (queue == null) {
        queue = new SynchronizedQueue<>(new ArrayDeque<KafkaIncomingEvent>());
        synchronized (unprocessedEventQueues) {
            unprocessedEventQueues.put(key, queue);
        }
    }
    queue.add(event);
    
}
 
Example 26  Project: Flink-CEPplus  File: MemoryManager.java
HybridOffHeapMemoryPool(int numInitialSegments, int segmentSize) {
	this.availableMemory = new ArrayDeque<>(numInitialSegments);
	this.segmentSize = segmentSize;

	for (int i = 0; i < numInitialSegments; i++) {
		this.availableMemory.add(ByteBuffer.allocateDirect(segmentSize));
	}
}
 
public List<List<Integer>> combinationSum2(int[] candidates, int target) {
    List<List<Integer>> res = new ArrayList<>();
    Arrays.sort(candidates);
    int len = candidates.length;
    dfs(candidates, target, 0, len, new ArrayDeque<>(len), res);
    return res;
}
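
The dfs helper is not part of the listing above. A plausible sketch, assuming the ArrayDeque is used as a stack holding the partial combination (parameter names and pruning details are assumptions, not the original code):

// Assumed shape of the missing helper: path is the ArrayDeque created above.
private void dfs(int[] candidates, int target, int begin, int len,
                 Deque<Integer> path, List<List<Integer>> res) {
    if (target == 0) {
        res.add(new ArrayList<>(path));
        return;
    }
    for (int i = begin; i < len; i++) {
        if (candidates[i] > target) {
            break; // candidates are sorted, so no later value can fit either
        }
        if (i > begin && candidates[i] == candidates[i - 1]) {
            continue; // skip duplicate values at the same recursion depth
        }
        path.addLast(candidates[i]);
        dfs(candidates, target - candidates[i], i + 1, len, path, res);
        path.removeLast();
    }
}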
 
/**
 * Calculates the destination file.
 *
 * @param resource the source file
 * @return the destination file's parent directory
 */
private File getDestinationFile( File resource )
{
    File parent = resource.getParentFile();
    Deque<String> fileComponentStack = new ArrayDeque<>();
    fileComponentStack.push( resource.getName() );

    while ( parent != null )
    {
        if ( "schema".equals( parent.getName() ) )
        {
            // All LDIF files besides the schema.ldif are under the 
            // schema/schema base path. So we need to add one more 
            // schema component to all LDIF files minus this schema.ldif
            fileComponentStack.push( "schema" );

            return assembleDestinationFile( fileComponentStack );
        }

        fileComponentStack.push( parent.getName() );

        if ( parent.equals( parent.getParentFile() ) || parent.getParentFile() == null )
        {
            throw new IllegalStateException( I18n.err( I18n.ERR_16004_ROOT_WITHOUT_SCHEMA ) );
        }

        parent = parent.getParentFile();
    }

    throw new IllegalStateException( I18n.err( I18n.ERR_16005_PARENT_NULL ) );
}
 
Example 29  Project: LeetCode-Solution-in-Good-Style  File: Solution.java
public static void main(String[] args) {
    int[] nums = {0, 0, 0, 0};
    Solution solution3 = new Solution();
    List<List<Integer>> res = solution3.threeSum(nums);
    res.forEach(System.out::println);


    List<Integer> res1 = new ArrayList<>();

    Deque<Integer> stack = new ArrayDeque<Integer>();
}
 
Example 30  Project: openjdk-jdk9  File: ConsString.java
private synchronized void flatten(final boolean flattenNested) {
    // We use iterative traversal as recursion may exceed the stack size limit.
    final char[] chars = new char[length];
    int pos = length;
    // Strings are most often composed by appending to the end, which causes ConsStrings
    // to be very unbalanced, with mostly single string elements on the right and a long
    // linear list on the left. Traversing from right to left helps to keep the stack small
    // in this scenario.
    final Deque<CharSequence> stack = new ArrayDeque<>();
    stack.addFirst(left);
    CharSequence cs = right;

    do {
        if (cs instanceof ConsString) {
            final ConsString cons = (ConsString) cs;
            // Count the times a cons-string is traversed as part of other cons-strings being flattened.
            // If it crosses a threshold we flatten the nested cons-string internally.
            if (cons.state == STATE_FLATTENED || (flattenNested && ++cons.state >= STATE_THRESHOLD)) {
                cs = cons.flattened(false);
            } else {
                stack.addFirst(cons.left);
                cs = cons.right;
            }
        } else {
            final String str = (String) cs;
            pos -= str.length();
            str.getChars(0, str.length(), chars, pos);
            cs = stack.isEmpty() ? null : stack.pollFirst();
        }
    } while (cs != null);

    left = new String(chars);
    right = "";
    state = STATE_FLATTENED;
}