Listed below is example code for java.util.Collections#reverse(); follow the link to view the source on GitHub, or leave a comment on the right.
private List<FeatureEntry> reweightCrossInformative(List<FeatureEntry> vp) {
    // Re-score each source-language feature by its cross-lingual distortion
    // similarity with the translated feature, keeping only positively scored,
    // meaningful entries, returned in descending score order.
    List<FeatureEntry> reweighted = new ArrayList<FeatureEntry>();
    for (FeatureEntry entry : vp) {
        int sourceId = entry.featureID;
        String sourceName = unlabel_s.getFeatureDB().getFeatureName(sourceId);
        String targetName = translateFeature(sourceName);
        int targetId = unlabel_t.getFeatureDB().getFeature(targetName);
        if (targetId == -1) {
            continue; // translated feature not present in the target feature DB
        }
        double combinedScore = entry.score * crossDistorsionSim(sourceId, targetId);
        if (combinedScore > 0 && meaningfulTerm(sourceName) && meaningfulTerm(targetName)) {
            reweighted.add(new FeatureEntry(sourceId, combinedScore));
        }
    }
    // Sort ascending by natural order, then flip for descending scores.
    Collections.sort(reweighted);
    Collections.reverse(reweighted);
    return reweighted;
}
@Test
public void popupRelevantRevertedOrder() throws IOException {
// Reverses the page's annotation list so the popup precedes its parent
// markup annotation, then checks that retainRelevantAnnotations still
// carries the popup/parent pair over to the destination page.
try (PDDocument doc = PDFParser.parse(SeekableSources.inMemorySeekableSourceFrom(
getClass().getClassLoader().getResourceAsStream("pdf/popup_annotation.pdf")))) {
PDPage firstOrigin = doc.getPage(0);
List<PDAnnotation> annots = firstOrigin.getAnnotations();
// Store the annotations back in reverse order (popup before its parent).
Collections.reverse(annots);
firstOrigin.setAnnotations(annots);
PDPage firstNew = new PDPage();
// Map the original page to a fresh destination page.
lookup.addLookupEntry(firstOrigin, firstNew);
new AnnotationsDistiller(doc).retainRelevantAnnotations(lookup);
List<PDAnnotation> annotations = firstNew.getAnnotations();
assertFalse(annotations.isEmpty());
// Collect the parent markup annotation of every retained popup.
List<PDAnnotationMarkup> parent = annotations.stream().filter(a -> a instanceof PDAnnotationPopup)
.map(a -> ((PDAnnotationPopup) a).getParent()).collect(Collectors.toList());
assertEquals(1, parent.size());
// The popup's parent must itself be among the retained annotations.
assertTrue(annotations.contains(parent.get(0)));
}
}
/**
 * Generates the n-bit reflected binary Gray code sequence.
 *
 * <p>Built recursively: prefix the (n-1)-bit sequence with "0", then append the
 * reversed (n-1)-bit sequence prefixed with "1", so consecutive codes differ in
 * exactly one bit.
 *
 * @param n number of bits; {@code n <= 0} yields the 0-bit code {@code [""]}
 *          (previously any {@code n <= 0} recursed without a base case and
 *          overflowed the stack)
 * @return the 2^n Gray codes in sequence order
 */
public static List<String> gray(int n) {
    // Base case widened from n == 1 to n <= 0; gray(1) now falls out of the
    // recursion naturally as ["0", "1"].
    if (n <= 0) {
        return Collections.singletonList("");
    }
    List<String> original = gray(n - 1);
    List<String> reversed = new ArrayList<>(original);
    Collections.reverse(reversed);
    return Stream.concat(original.stream().map(s -> "0" + s), reversed.stream().map(s -> "1" + s))
            .collect(Collectors.toList());
}
private List<String> getFactoryIds(Repository<Entity> repository) {
    // Walk the entity type's extends-chain, prepending each id so the root
    // ancestor ends up first and the repository's own type last.
    List<String> factoryIds = new ArrayList<>();
    EntityType current = repository.getEntityType();
    do {
        factoryIds.add(0, current.getId());
        current = current.getExtends();
    } while (current != null);
    return factoryIds;
}
/**
 * Evaluates the given arithmetic expression.
 * @param expression the expression to evaluate, e.g. 5+12*(3+5)/7
 * @return the computed result
 */
private Object calculate(String expression) {
Stack<String> postfixStack = new Stack<String>();
Stack<Object> resultStack = new Stack<Object>();
// prepare() fills postfixStack with the postfix (RPN) form of the expression.
prepare(expression,postfixStack);
Collections.reverse(postfixStack);// reverse the postfix stack so tokens pop in left-to-right order
while (!postfixStack.isEmpty()) {
String currentValue = postfixStack.pop();
if (currentValue.equals("") || !isOperator(currentValue.charAt(0))) {// not an operator: push onto the operand stack
if(currentValue.startsWith("\"")){
// strip the surrounding quotes from string literals
currentValue=currentValue.substring(1,currentValue.length()-1);
}
currentValue = currentValue.replace("~", "-");
resultStack.push(currentValue);
} else {// operator: pop two operands and apply the operator to them
String secondValue = resultStack.pop().toString();
String firstValue = resultStack.pop().toString();
// Turn the negative-number marker ('~') back into a real minus sign
firstValue = firstValue.replace("~", "-");
secondValue = secondValue.replace("~", "-");
Object tempResult = calculate(firstValue, secondValue, currentValue.charAt(0));
resultStack.push(tempResult);
}
}
// The single remaining value on the operand stack is the final result.
return resultStack.pop();
}
Tx_in(ZCashTransactionOutput base) {
List<Byte> txbytes = Bytes.asList(Utils.hexToBytes(base.txid));
Collections.reverse(txbytes);
txid = Bytes.toArray(txbytes);
index = base.n;
script = Utils.hexToBytes(base.hex);
this.value = base.value;
}
private static List<String> generateScopes(ScopeListElement scopesList) {
    // Follow parent links from the given element outward, prepending each
    // scope name so the returned list runs outermost-first.
    List<String> result = new ArrayList<>();
    for (ScopeListElement current = scopesList; current != null; current = current.parent) {
        result.add(0, current.scope);
    }
    return result;
}
public void popStackBySignature(String signature) {
    // Pop one checked entry per argument type, processing the types in
    // reverse declaration order.
    Type[] argumentTypes = Type.getArgumentTypes(signature);
    for (int i = argumentTypes.length - 1; i >= 0; i--) {
        popStackChecked(argumentTypes[i]);
    }
}
WaveAggregateOp invert() {
    // Invert every component op (in the original pair order) while prepending
    // each result, so the returned aggregate applies the inverses back-to-front.
    List<OpCreatorPair> inverted = new ArrayList<OpCreatorPair>(opPairs.size());
    for (OpCreatorPair pair : opPairs) {
        inverted.add(0, new OpCreatorPair(pair.op.invert(), pair.creator));
    }
    return new WaveAggregateOp(inverted);
}
@Override
public List<QualifiedRecommendation> getModifiedRecommendations(
    File oldFile, File newFile, List<QualifiedRecommendation> originalRecommendations) {
  // Copy so the caller's list is untouched, then order by COMPARATOR and flip.
  // (Sort-then-reverse is kept deliberately: it also reverses ties, which a
  // reversed comparator would not.)
  List<QualifiedRecommendation> ordered =
      new ArrayList<QualifiedRecommendation>(originalRecommendations);
  Collections.sort(ordered, COMPARATOR);
  Collections.reverse(ordered);
  List<QualifiedRecommendation> modified = new ArrayList<>(ordered.size());
  // Remaining budget of bytes we are still willing to recompress.
  long budget = maxBytesToRecompress;
  for (QualifiedRecommendation candidate : ordered) {
    if (!candidate.getRecommendation().uncompressNewEntry) {
      // Entry won't be uncompressed, so it costs nothing against the budget.
      modified.add(candidate);
      continue;
    }
    long cost = candidate.getNewEntry().getUncompressedSize();
    if (budget - cost >= 0) {
      // Fits in the remaining budget: keep as-is and charge the cost.
      modified.add(candidate);
      budget -= cost;
    } else {
      // Over budget: rewrite the recommendation so this tuple stays compressed.
      modified.add(
          new QualifiedRecommendation(
              candidate.getOldEntry(),
              candidate.getNewEntry(),
              Recommendation.UNCOMPRESS_NEITHER,
              RecommendationReason.RESOURCE_CONSTRAINED));
    }
  }
  return modified;
}
static String getLastRecordedStateForVariable(String variableName) {
    // Scan a snapshot of the recorded states backwards so the last recorded
    // entry for the given variable name wins; null when none matches.
    List<StateData> snapshot = new ArrayList<StateData>(StateRegistry.getLocalVariableStates());
    for (int i = snapshot.size() - 1; i >= 0; i--) {
        StateData state = snapshot.get(i);
        if (state.name.equals(variableName)) {
            return state.value;
        }
    }
    return null;
}
/**
 * Reverses a list of migration events in place, converting from the natural
 * sampling ordering (parent-to-child) to the natural coalescent ordering.
 * On input, getColourAbove returns the colour of the branch *below* an event;
 * after the call each event's colourAbove is taken from its successor, with
 * the final event taking {@code parentColour}.
 */
protected final void reverseColourChangeList( List<ColourChange> colourChanges, int parentColour ) {
    Collections.reverse( colourChanges );
    int size = colourChanges.size();
    for (int i = 0; i < size; i++) {
        // Read the successor's colour before overwriting the current entry.
        int colour = (i + 1 < size) ? colourChanges.get( i + 1 ).getColourAbove() : parentColour;
        colourChanges.get( i ).setColourAbove( colour );
    }
}
public final void mouseClickedBase(float mouseX, float mouseY, int button) {
    // Offer the click to elements in reverse registration order, iterating a
    // snapshot so handlers may safely mutate this.elements; stop at the first
    // element that consumes the click.
    List<Element> snapshot = new ArrayList<>(this.elements);
    for (int i = snapshot.size() - 1; i >= 0; i--) {
        if (snapshot.get(i).mouseClicked(mouseX, mouseY, button)) {
            break;
        }
    }
    mouseClicked(mouseX, mouseY, button);
}
@GET
@ApiOperation(value = "Returns the last logged frontend messages. The amount is limited to the "
+ LogConstants.LOG_BUFFER_LIMIT + " last entries.")
@ApiParam(name = "limit", allowableValues = "range[1, " + LogConstants.LOG_BUFFER_LIMIT + "]")
public Response getLastLogs(@DefaultValue(LogConstants.LOG_BUFFER_LIMIT + "") @QueryParam("limit") Integer limit) {
    // Nothing buffered yet: answer with an empty JSON array.
    if (LOG_BUFFER.size() <= 0) {
        return Response.ok("[]").build();
    }
    // A missing or out-of-range limit falls back to "everything buffered".
    boolean limitUsable = limit != null && limit > 0 && limit <= LogConstants.LOG_BUFFER_LIMIT;
    int effectiveLimit = limitUsable ? limit : LOG_BUFFER.size();
    if (effectiveLimit >= LOG_BUFFER.size()) {
        return Response.ok(LOG_BUFFER.toArray()).build();
    }
    // Take the newest entries from the tail of the buffer, then flip them
    // back into the buffer's original order.
    final List<LogMessage> result = new ArrayList<>();
    Iterator<LogMessage> newestFirst = LOG_BUFFER.descendingIterator();
    while (newestFirst.hasNext() && result.size() < effectiveLimit) {
        result.add(newestFirst.next());
    }
    Collections.reverse(result);
    return Response.ok(result).build();
}
/**
 * Returns a random list of concave GeoPolygons to be used as holes for the
 * given polygon, under the given constraints. Note that the constraints for
 * points and for holes are different.
 *
 * @param planetModel The planet model.
 * @param polygon The polygon that the holes must lie within.
 * @param holeConstraints The constraints that each generated hole must satisfy.
 * @param pointConstraints The constraints that each generated point must satisfy.
 * @return The randomly generated hole polygons; possibly fewer than requested
 *         if the iteration budget runs out.
 */
private List<GeoPolygon> concavePolygonHoles(PlanetModel planetModel,
GeoPolygon polygon,
Constraints holeConstraints,
Constraints pointConstraints) {
int iterations =0;
// Target between 1 and 3 holes.
int holesCount = random().nextInt(3) + 1;
List<GeoPolygon> holes = new ArrayList<>();
while (iterations < MAX_SHAPE_ITERATIONS) {
iterations++;
// 3 to 5 vertices per candidate hole.
int vertexCount = random().nextInt(3) + 3;
List<GeoPoint> geoPoints = points(vertexCount, planetModel, pointConstraints);
if (geoPoints.size() < 3){
continue;
}
geoPoints = orderPoints(geoPoints);
GeoPolygon inversePolygon = GeoPolygonFactory.makeGeoPolygon(planetModel, geoPoints);
//The convex polygon must be within
if (inversePolygon == null || polygon.getRelationship(inversePolygon) != GeoArea.WITHIN) {
continue;
}
//make it concave by reversing the point order
Collections.reverse(geoPoints);
try {
GeoPolygon hole = GeoPolygonFactory.makeGeoPolygon(planetModel, geoPoints);
if (!holeConstraints.valid(hole) || isConvex(planetModel, hole)) {
continue;
}
holes.add(hole);
if (holes.size() == holesCount){
return holes;
}
// Points generated for later holes must stay outside this hole.
pointConstraints.put(hole, GeoArea.DISJOINT);
} catch (IllegalArgumentException e) {
// The factory rejected this candidate; try another one.
continue;
}
}
return holes;
}
/**
 * Returns the words of {@code s} in reverse order, separated by single
 * spaces; leading/trailing whitespace is dropped and runs of whitespace
 * collapse to one separator.
 */
public String reverseWords(String s) {
    String[] words = s.trim().split("\\s+");
    StringBuilder sb = new StringBuilder();
    for (int i = words.length - 1; i >= 0; i--) {
        sb.append(words[i]);
        if (i > 0) {
            sb.append(' ');
        }
    }
    return sb.toString();
}
/**
 * Builds (and memoizes in {@code inlineContextMap}) the inline context for
 * {@code graph}: the scope names taken from Debug.context(), returned with
 * the outer scopes before the inner ones.
 */
private List<String> getInlineContext(Graph graph) {
List<String> result = inlineContextMap.get(graph);
if (result == null) {
result = new ArrayList<>();
Object lastMethodOrGraph = null;
// Tracks whether the graph itself appeared anywhere in the debug context.
boolean graphSeen = false;
for (Object o : Debug.context()) {
if (o == graph) {
graphSeen = true;
}
if (o instanceof DebugDumpScope) {
DebugDumpScope debugDumpScope = (DebugDumpScope) o;
// Decorator scopes prefix the most recent entry instead of adding a new one.
if (debugDumpScope.decorator && !result.isEmpty()) {
result.set(result.size() - 1, debugDumpScope.name + ":" + result.get(result.size() - 1));
} else {
result.add(debugDumpScope.name);
}
} else {
addMethodContext(result, o, lastMethodOrGraph);
}
if (o instanceof JavaMethod || o instanceof Graph) {
lastMethodOrGraph = o;
}
}
if (result.isEmpty()) {
// No usable context at all: fall back to the graph's own name.
result.add(graph.toString());
graphSeen = true;
}
// Reverse list such that inner method comes after outer method.
Collections.reverse(result);
if (!graphSeen) {
/*
 * The graph isn't in any context but is being processed within another graph so add
 * it to the end of the scopes.
 */
if (asJavaMethod(graph) != null) {
addMethodContext(result, graph, lastMethodOrGraph);
} else {
result.add(graph.toString());
}
}
// Cache for subsequent lookups of the same graph.
inlineContextMap.put(graph, result);
}
return result;
}
/**
 * Prints a table of task metrics to stdout, ordered descending by the given
 * comparator, followed by min/max/avg statistics of {@code fieldName}.
 *
 * <p>NOTE: sorts {@code metrics} in place, so the caller's list order changes.
 *
 * @param metrics    the task metrics to report; nothing is printed if empty
 * @param heading    label used in the section header
 * @param comparator row ordering (applied ascending, then reversed)
 * @param fieldName  name of the numeric field aggregated via reflection
 * @param isTime     if true, format aggregate values as HH:MM:SS
 */
public static void dumpTasks(List<TaskMetrics> metrics,
                             String heading,
                             Comparator<TaskMetrics> comparator,
                             String fieldName,
                             boolean isTime)
    throws IllegalAccessException, InvocationTargetException,
    NoSuchMethodException {
  if (metrics.isEmpty()) {
    return;
  }
  System.out.println("== Tasks ordered by " + heading + " ==");
  PaddedTable table = new PaddedTable();
  table
      .addColumnTitle("Type")
      .addColumnTitle("TaskId")
      .addColumnTitle("Status")
      .addColumnTitle("Host")
      .addColumnTitle("ExecutionTime")
      .addColumnTitle("InputBytes")
      .addColumnTitle("OutputBytes")
      .addColumnTitle("InputRecords")
      .addColumnTitle("OutputRecords"); // fixed typo: was "OputputRecords"
  // Sort ascending, then reverse for a descending view (mutates the argument).
  Collections.sort(metrics, comparator);
  Collections.reverse(metrics);
  Long minVal = null;
  Long maxVal = null;
  long totalVals = 0;
  for (TaskMetrics m : metrics) {
    long v = extractLongFieldValue(m, fieldName);
    minVal = minVal == null ? v : Math.min(minVal, v);
    maxVal = maxVal == null ? v : Math.max(maxVal, v);
    totalVals += v;
    table.newRow();
    table.addColumnValue(m.getType())
        .addColumnValue(m.getTaskId())
        .addColumnValue(m.getStatus())
        .addColumnValue(m.getHost())
        .addColumnValue(JobHistoryHelper.formatTime(
            m.getOverallTimeMillis()))
        .addColumnValue(m.getInputBytes())
        .addColumnValue(m.getOutputBytes())
        .addColumnValue(m.getInputRecords())
        .addColumnValue(m.getOutputRecords());
  }
  System.out.println();
  if (isTime) {
    System.out
        .println(String.format("Min/max/avg (HH:MM:SS) = %s/%s/%s",
            JobHistoryHelper.formatTime(minVal),
            JobHistoryHelper.formatTime(maxVal),
            JobHistoryHelper.formatTime(totalVals / metrics.size())));
  } else {
    // metrics is non-empty here, so the average division is safe.
    System.out.println(String.format("Min/max/avg = %d/%d/%d",
        minVal, maxVal, totalVals / metrics.size()));
  }
  System.out.println();
  System.out.println(table);
}
/**
 * Returns the n annotations preceding the given annotation
 *
 * @param cas
 *          a CAS.
 * @param type
 *          a UIMA type.
 * @param annotation
 *          anchor annotation
 * @param count
 *          number of annotations to collect
 * @return List of aType annotations preceding anchor annotation
 * @see <a href="package-summary.html#SortOrder">Order of selected feature structures</a>
 */
public static List<AnnotationFS> selectPreceding(CAS cas, Type type, AnnotationFS annotation,
int count) {
if (!cas.getTypeSystem().subsumes(cas.getAnnotationType(), type)) {
throw new IllegalArgumentException("Type [" + type.getName() + "] is not an annotation type");
}
List<AnnotationFS> precedingAnnotations = new ArrayList<AnnotationFS>();
// Seek annotation in index
// withSnapshotIterators() not needed here since we copy the FSes to a list anyway
FSIterator<AnnotationFS> itr = cas.getAnnotationIndex(type).iterator();
itr.moveTo(annotation);
// If the insertion point is beyond the index, move back to the last.
if (!itr.isValid()) {
itr.moveToLast();
// Index is empty: nothing can precede the anchor.
if (!itr.isValid()) {
return precedingAnnotations;
}
}
// No need to do additional seeks here (as done in selectCovered) because the current method
// does not have to worry about type priorities - it never returns annotations that have
// the same offset as the reference annotation.
// make sure we're past the beginning of the reference annotation
while (itr.isValid() && itr.get().getEnd() > annotation.getBegin()) {
itr.moveToPrevious();
}
// add annotations from the iterator into the result list
// (walking backwards, so they are collected nearest-first and flipped below;
// only annotations ending at or before the anchor's begin offset count)
for (int i = 0; i < count && itr.isValid(); itr.moveToPrevious()) {
AnnotationFS cur = itr.get();
if (cur.getEnd() <= annotation.getBegin()) {
precedingAnnotations.add(itr.get());
i++;
}
}
// return in correct order
Collections.reverse(precedingAnnotations);
return precedingAnnotations;
}
// Registers a "Sort by <column>" operation on the given table header and
// remembers it in iSortOperations.
protected void addSortOperation(final UniTimeTableHeader header, final ChangeLogComparator.SortBy sort, final String column) {
Operation op = new SortOperation() {
@Override
public void execute() {
// Toggle the sort direction; an unsorted header starts ascending.
boolean asc = (header.getOrder() == null ? true : !header.getOrder());
if (iMaxTableLines > 0 && iSectioningActions.size() > iMaxTableLines) {
// More rows than the table shows at once: sort the backing list and
// refill the visible window instead of sorting the widget directly.
Collections.sort(iSectioningActions, new ChangeLogComparator(sort));
if (!asc) Collections.reverse(iSectioningActions);
fillLogTable(iSectioningActionsFirstLine);
// Clear the sort indicator on every header cell of the first row.
for (int i = 0; i < iLogTable.getCellCount(0); i++) {
Widget w = iLogTable.getWidget(0, i);
if (w != null && w instanceof UniTimeTableHeader) {
UniTimeTableHeader h = (UniTimeTableHeader)w;
h.setOrder(null);
}
}
header.setOrder(asc);
} else {
// Everything fits in the table: let the widget sort itself.
iLogTable.sort(header, new ChangeLogComparator(sort));
}
// Persist the chosen sort (positive = ascending, negative = descending).
SectioningStatusCookie.getInstance().setSortBy(iOnline, 2, header.getOrder() ? 1 + sort.ordinal() : -1 - sort.ordinal());
}
@Override
public boolean isApplicable() {
return true;
}
@Override
public boolean hasSeparator() {
return false;
}
@Override
public String getName() {
return MESSAGES.sortBy(column);
}
@Override
public String getColumnName() {
return column;
}
};
header.addOperation(op);
iSortOperations.add(op);
}