Below are example usages of java.util.TreeMap#descendingKeySet(). Click the link to view the source code on GitHub, or leave a comment on the right.
/**
 * descendingKeySet iterates keys in descending order
 */
public void testDescendingKeySetOrder() {
    TreeMap map = map5();
    Iterator it = map.descendingKeySet().iterator();
    // The first key of the descending view is the largest key in the map.
    Integer previous = (Integer) it.next();
    assertEquals(previous, five);
    int seen = 1;
    for (; it.hasNext(); ++seen) {
        Integer current = (Integer) it.next();
        // Strictly decreasing: each key is smaller than the one before it.
        assertTrue(previous.compareTo(current) > 0);
        previous = current;
    }
    assertEquals(5, seen);
}
/**
 * descending iterator of descendingKeySet walks keys in ascending order
 */
public void testDescendingKeySetDescendingIteratorOrder() {
    TreeMap map = map5();
    NavigableSet keys = map.descendingKeySet();
    Iterator it = keys.descendingIterator();
    // Reversing the descending view starts from the smallest key.
    Integer previous = (Integer) it.next();
    assertEquals(previous, one);
    int seen = 1;
    while (it.hasNext()) {
        Integer current = (Integer) it.next();
        // Strictly increasing: each key is larger than the previous one.
        assertTrue(previous.compareTo(current) < 0);
        previous = current;
        seen++;
    }
    assertEquals(5, seen);
}
/**
 * Filters the given ranking of recommendations and deletes items that already
 * are in the cart. Furthermore caps the recommendations and only uses the
 * {@link AbstractRecommender#MAX_NUMBER_OF_RECOMMENDATIONS} highest rated
 * recommendations.
 *
 * @param priorityList
 *            The unfiltered ranking assigning each recommended product ID a
 *            score or an importance. Does not need to be sorted.
 * @param currentItems
 *            The list of item IDs that must NOT be contained in the returned
 *            list.
 * @return A sorted list of recommendations with a size not greater than
 *         {@link AbstractRecommender#MAX_NUMBER_OF_RECOMMENDATIONS}
 */
protected List<Long> filterRecommendations(Map<Long, Double> priorityList, List<Long> currentItems) {
    // Group product IDs by score so we can walk scores from highest to lowest.
    TreeMap<Double, List<Long>> ranking = createRanking(priorityList);
    List<Long> recommendations = new ArrayList<>(MAX_NUMBER_OF_RECOMMENDATIONS);
    for (Double score : ranking.descendingKeySet()) {
        for (long candidate : ranking.get(score)) {
            // Stop as soon as the cap is reached; everything left is lower-rated.
            if (recommendations.size() >= MAX_NUMBER_OF_RECOMMENDATIONS) {
                return recommendations;
            }
            // Skip products the customer already has in the cart.
            if (!currentItems.contains(candidate)) {
                recommendations.add(candidate);
            }
        }
    }
    return recommendations;
}
/**
 * descendingKeySet yields keys from largest to smallest
 */
public void testDescendingKeySetOrder() {
    TreeMap map = map5();
    Set descending = map.descendingKeySet();
    Iterator iter = descending.iterator();
    Integer prior = (Integer) iter.next();
    // Largest key comes first in the descending view.
    assertEquals(prior, five);
    int total = 1;
    while (iter.hasNext()) {
        Integer next = (Integer) iter.next();
        assertTrue(prior.compareTo(next) > 0);
        prior = next;
        total++;
    }
    // All five keys must be visited.
    assertEquals(5, total);
}
/**
 * descending iterator of descendingKeySet yields keys from smallest to largest
 */
public void testDescendingKeySetDescendingIteratorOrder() {
    TreeMap map = map5();
    NavigableSet descending = map.descendingKeySet();
    Iterator iter = descending.descendingIterator();
    Integer prior = (Integer) iter.next();
    // Double reversal: the first element is the map's smallest key.
    assertEquals(prior, one);
    int total = 1;
    while (iter.hasNext()) {
        Integer next = (Integer) iter.next();
        assertTrue(prior.compareTo(next) < 0);
        prior = next;
        total++;
    }
    // All five keys must be visited.
    assertEquals(5, total);
}
/**
 * Walks the migration graph from {@code start} toward {@code end}, appending each
 * chosen step to {@code result}. For upgrades it greedily takes the largest
 * reachable target version at each step; for downgrades, the smallest.
 *
 * @param result  accumulator the chosen migrations are appended to
 * @param upgrade true when moving to a higher version, false when moving lower
 * @param start   version to start from
 * @param end     version to reach
 * @return {@code result} with the full path appended, or {@code null} if no
 *         complete path exists
 */
private List<Migration> findUpMigrationPath(List<Migration> result, boolean upgrade,
        int start, int end) {
    while (upgrade ? start < end : start > end) {
        TreeMap<Integer, Migration> targetNodes = mMigrations.get(start);
        if (targetNodes == null) {
            // No migrations registered from this version: dead end.
            return null;
        }
        // Keys are sorted, so begin the search at the far end to take the
        // biggest possible jump toward `end` first.
        Set<Integer> candidates =
                upgrade ? targetNodes.descendingKeySet() : targetNodes.keySet();
        boolean advanced = false;
        for (int version : candidates) {
            boolean inRange = upgrade
                    ? (version <= end && version > start)
                    : (version >= end && version < start);
            if (inRange) {
                result.add(targetNodes.get(version));
                start = version;
                advanced = true;
                break;
            }
        }
        if (!advanced) {
            // Nothing moved us toward the target: the path is broken.
            return null;
        }
    }
    return result;
}
/**
 * descendingKeySet.toArray returns an array containing all keys
 */
public void testDescendingKeySetToArray() {
    TreeMap map = map5();
    Set keys = map.descendingKeySet();
    Object[] contents = keys.toArray();
    assertEquals(5, contents.length);
    // Every array element is a member of the set.
    assertTrue(keys.containsAll(Arrays.asList(contents)));
    // Replacing one element with a foreign key breaks containment.
    contents[0] = m10;
    assertFalse(keys.containsAll(Arrays.asList(contents)));
}
/**
 * Add a block of features to the buffer.
 *
 * <p>Block layout: the feature count, followed by (feature ID, encoded value)
 * pairs written in descending feature-ID order. Zero-valued features are
 * skipped (sparse encoding).
 *
 * @param features TreeMap with the features for one rule.
 * @return The index of the resulting data block.
 */
@Override
int add(TreeMap<Integer, Float> features) {
// Remember where this block starts so it can be indexed below.
int data_position = buffer.position();
// Over-estimate how much room this addition will need: for each
// feature (ID_SIZE for label, "upper bound" of 4 for the value), plus ID_SIZE for
// the number of features. If this won't fit, reallocate the buffer.
int size_estimate = (4 + EncoderConfiguration.ID_SIZE) * features.size()
+ EncoderConfiguration.ID_SIZE;
if (buffer.capacity() - buffer.position() <= size_estimate)
reallocate();
// Write features to buffer.
// NOTE(review): the count written here is features.size(), but zero-valued
// features are skipped below, so the stored count can exceed the number of
// pairs actually written — confirm the reader tolerates (or expects) this.
idEncoder.write(buffer, features.size());
for (Integer k : features.descendingKeySet()) {
float v = features.get(k);
// Sparse features.
if (v != 0.0) {
idEncoder.write(buffer, k);
// Each feature ID selects its own value encoder.
encoderConfig.encoder(k).write(buffer, v);
}
}
// Store position the block was written to.
memoryLookup.add(data_position);
// Update total size (in bytes).
totalSize = buffer.position();
// Return block index.
return memoryLookup.size() - 1;
}
/**
 * Descendingly sort the keys of the map and return them in order,
 * but eliminate the very smallest key (the last one in descending order).
 *
 * @param stability sorted map whose keys are collected
 * @return the keys in descending order without the minimum key;
 *         empty for an empty input map
 */
protected static <T,P> ArrayList<T> descSortedKeySet(TreeMap<T,P> stability) {
    // Snapshot the descending key view, then drop the trailing (smallest) key.
    ArrayList<T> nodeList = new ArrayList<>(stability.descendingKeySet());
    if (!nodeList.isEmpty()) {
        nodeList.remove(nodeList.size() - 1); // exclude the root...
    }
    return nodeList;
}
/**
 * descendingKeySet.toArray returns an array holding every key
 */
public void testDescendingKeySetToArray() {
    TreeMap map = map5();
    Set descending = map.descendingKeySet();
    Object[] snapshot = descending.toArray();
    assertEquals(5, snapshot.length);
    // The snapshot and the set agree on membership.
    assertTrue(descending.containsAll(Arrays.asList(snapshot)));
    // Injecting a key absent from the map must break containsAll.
    snapshot[0] = m10;
    assertFalse(descending.containsAll(Arrays.asList(snapshot)));
}
// Pairs up spectral event points into fingerprints, ranks the candidate pairs by
// combined contrast ("energy"), then keeps the highest-energy prints subject to a
// per-event-point cap.
private void packEventPointsIntoFingerprints(){
int size = Config.getInt(Key.NFFT_SIZE);
FFT fft = new FFT(size);
// Map each FFT bin index to its position (and height) in cents.
float[] binStartingPointsInCents = new float[size];
// NOTE(review): binHeightsInCents is computed here but not read in this method —
// confirm whether it is needed or dead.
float[] binHeightsInCents = new float[size];
for (int i = 1; i < size; i++) {
binStartingPointsInCents[i] = (float) PitchConverter.hertzToAbsoluteCent(fft.binToHz(i,sampleRate));
binHeightsInCents[i] = binStartingPointsInCents[i] - binStartingPointsInCents[i-1];
}
// Duration of one analysis frame, used to convert frame deltas to milliseconds.
float frameDurationInMS = Config.getInt(Key.NFFT_STEP_SIZE)/ ((float) Config.getInt(Key.NFFT_SAMPLE_RATE)) * 1000.f;
int maxEventPointDeltaTInMs = 2000; //two seconds
int maxEventPointDeltaFInCents = 1800; //1.5 octave
int minEventPointDeltaTInMs = 60;//milliseconds
//Collections.shuffle(eventPoints);
// NOTE(review): keying on the float energy means two prints with exactly equal
// energy collapse to one entry (later put overwrites earlier) — confirm intended.
TreeMap<Float,NFFTFingerprint> printsOrderedByEnergy = new TreeMap<Float,NFFTFingerprint>();
//int countPrint = 0;
//Pack the event points into fingerprints
for(int i = 0; i < eventPoints.size();i++){
int t1 = eventPoints.get(i).t;
float f1 = binStartingPointsInCents[eventPoints.get(i).f];
//int maxtFirstLevel = t1 + maxEventPointDeltaTInSteps;
float maxfFirstLevel = f1 + maxEventPointDeltaFInCents;
float minfFirstLevel = f1 - maxEventPointDeltaFInCents;
for(int j = 0; j < eventPoints.size() ;j++){
int t2 = eventPoints.get(j).t;
float f2 = binStartingPointsInCents[eventPoints.get(j).f];
// Pair only points where j is later in time, at a different frequency, within
// the allowed time window (in ms) and within the frequency band around f1.
if(t1 < t2 && f1 != f2 && Math.abs(t2-t1) * frameDurationInMS > minEventPointDeltaTInMs && Math.abs(t2-t1) * frameDurationInMS < maxEventPointDeltaTInMs && f2 > minfFirstLevel && f2 < maxfFirstLevel){
float energy = eventPoints.get(i).contrast + eventPoints.get(j).contrast;
NFFTFingerprint fingerprint;
fingerprint = new NFFTFingerprint(eventPoints.get(i),eventPoints.get(j));
fingerprint.energy = energy;
printsOrderedByEnergy.put(energy,fingerprint);
//countPrint++;
}
}
}
//System.out.println(countPrint + " prints created, stored : " + printsOrderedByEnergy.size());
//countPrint=0;
// Greedily keep the highest-energy prints, limiting how many prints any single
// event point may participate in.
int maxPrintsPerPoint = Config.getInt(Key.NFFT_MAX_FINGERPRINTS_PER_EVENT_POINT);
HashMap<NFFTEventPoint,Integer> printsPerPoint = new HashMap<NFFTEventPoint, Integer>();
for(int i = 0; i < eventPoints.size();i++){
printsPerPoint.put(eventPoints.get(i), 0);
}
// descendingKeySet: visit candidates from highest to lowest energy.
for(Float key: printsOrderedByEnergy.descendingKeySet()){
NFFTFingerprint print = printsOrderedByEnergy.get(key);
if(printsPerPoint.get(print.p1)<maxPrintsPerPoint && printsPerPoint.get(print.p2)<maxPrintsPerPoint){
printsPerPoint.put(print.p1,printsPerPoint.get(print.p1)+1);
printsPerPoint.put(print.p2,printsPerPoint.get(print.p2)+1);
fingerprints.add(print);
//countPrint++;
}
}
//System.out.println(countPrint + " prints created");
}
// QIFFT variant: pairs event points into fingerprints, ranks pairs by combined
// contrast ("energy"), then keeps the highest-energy prints subject to a
// per-event-point cap.
private void packEventPointsIntoFingerprints(){
int size = Config.getInt(Key.NFFT_SIZE);
FFT fft = new FFT(size);
// NOTE(review): both bin arrays are filled but not read in this method (the
// QIFFT points carry their own frequency via getFrequencyInCents) — confirm dead.
float[] binStartingPointsInCents = new float[size];
float[] binHeightsInCents = new float[size];
for (int i = 1; i < size; i++) {
binStartingPointsInCents[i] = (float) PitchConverter.hertzToAbsoluteCent(fft.binToHz(i,sampleRate));
binHeightsInCents[i] = binStartingPointsInCents[i] - binStartingPointsInCents[i-1];
}
// Duration of one analysis frame, used to convert frame deltas to milliseconds.
float frameDurationInMS = Config.getInt(Key.NFFT_STEP_SIZE)/ ((float) Config.getInt(Key.NFFT_SAMPLE_RATE)) * 1000.f;
int maxEventPointDeltaTInMs = 2000; //two seconds
int maxEventPointDeltaFInCents = 1800; //1.5 octave
int minEventPointDeltaTInMs = 60;//milliseconds
//Collections.shuffle(eventPoints);
// NOTE(review): keying on the float energy means two prints with exactly equal
// energy collapse to one entry (later put overwrites earlier) — confirm intended.
TreeMap<Float,QIFFTFingerprint> printsOrderedByEnergy = new TreeMap<Float,QIFFTFingerprint>();
//int countPrint = 0;
//Pack the event points into fingerprints
for(int i = 0; i < eventPoints.size();i++){
float t1 = eventPoints.get(i).t;
float f1 = eventPoints.get(i).getFrequencyInCents();
//int maxtFirstLevel = t1 + maxEventPointDeltaTInSteps;
float maxfFirstLevel = f1 + maxEventPointDeltaFInCents;
float minfFirstLevel = f1 - maxEventPointDeltaFInCents;
for(int j = 0; j < eventPoints.size() ;j++){
float t2 = eventPoints.get(j).t;
float f2 = eventPoints.get(j).getFrequencyInCents();
// Pair only points where j is later in time, at a different frequency, within
// the allowed time window (in ms) and within the frequency band around f1.
if(t1 < t2 && f1 != f2 && Math.abs(t2-t1) * frameDurationInMS > minEventPointDeltaTInMs && Math.abs(t2-t1) * frameDurationInMS < maxEventPointDeltaTInMs && f2 > minfFirstLevel && f2 < maxfFirstLevel){
float energy = eventPoints.get(i).contrast + eventPoints.get(j).contrast;
QIFFTFingerprint fingerprint;
fingerprint = new QIFFTFingerprint(eventPoints.get(i),eventPoints.get(j));
fingerprint.energy = energy;
printsOrderedByEnergy.put(energy,fingerprint);
}
}
}
//System.out.println(countPrint + " prints created, stored : " + printsOrderedByEnergy.size());
//int countPrint=0;
// Greedily keep the highest-energy prints, limiting how many prints any single
// event point may participate in.
int maxPrintsPerPoint = Config.getInt(Key.NFFT_MAX_FINGERPRINTS_PER_EVENT_POINT);
HashMap<QIFFTEventPoint,Integer> printsPerPoint = new HashMap<QIFFTEventPoint, Integer>();
for(int i = 0; i < eventPoints.size();i++){
printsPerPoint.put(eventPoints.get(i), 0);
}
// descendingKeySet: visit candidates from highest to lowest energy.
for(Float key: printsOrderedByEnergy.descendingKeySet()){
QIFFTFingerprint print = printsOrderedByEnergy.get(key);
if(printsPerPoint.get(print.p1)<maxPrintsPerPoint && printsPerPoint.get(print.p2)<maxPrintsPerPoint){
printsPerPoint.put(print.p1,printsPerPoint.get(print.p1)+1);
printsPerPoint.put(print.p2,printsPerPoint.get(print.p2)+1);
fingerprints.add(print);
//countPrint++;
}
}
}
/**
 * Lists the GCS diff files whose upper checkpoint time falls in
 * ({@code fromTime}, {@code toTime}], reconstructed into a consistent,
 * chronologically ordered chain.
 *
 * @param fromTime exclusive lower bound on each file's upper checkpoint time
 * @param toTime   optional inclusive upper bound; {@code null} means unbounded
 * @return the metadata of the diff files in restore order
 * @throws IllegalStateException if the set of files found has gaps or forks
 */
ImmutableList<GcsFileMetadata> listDiffFiles(DateTime fromTime, @Nullable DateTime toTime) {
logger.atInfo().log("Requested restore from time: %s", fromTime);
if (toTime != null) {
logger.atInfo().log("  Until time: %s", toTime);
}
// List all of the diff files on GCS and build a map from each file's upper checkpoint time
// (extracted from the filename) to its asynchronously-loaded metadata, keeping only files with
// an upper checkpoint time > fromTime.
TreeMap<DateTime, ListenableFuture<GcsFileMetadata>> upperBoundTimesToMetadata
= new TreeMap<>();
Iterator<ListItem> listItems;
try {
// TODO(b/23554360): Use a smarter prefixing strategy to speed this up.
listItems = gcsService.list(
gcsBucket,
new ListOptions.Builder().setPrefix(DIFF_FILE_PREFIX).build());
} catch (IOException e) {
throw new RuntimeException(e);
}
DateTime lastUpperBoundTime = START_OF_TIME;
while (listItems.hasNext()) {
final String filename = listItems.next().getName();
// The upper checkpoint time is encoded as the filename suffix after the prefix.
DateTime upperBoundTime = DateTime.parse(filename.substring(DIFF_FILE_PREFIX.length()));
if (isInRange(upperBoundTime, fromTime, toTime)) {
upperBoundTimesToMetadata.put(upperBoundTime, executor.submit(() -> getMetadata(filename)));
lastUpperBoundTime = latestOf(upperBoundTime, lastUpperBoundTime);
}
}
if (upperBoundTimesToMetadata.isEmpty()) {
logger.atInfo().log("No files found");
return ImmutableList.of();
}
// Reconstruct the sequence of files by traversing backwards from "lastUpperBoundTime" (i.e. the
// last file that we found) and finding its previous file until we either run out of files or
// get to one that precedes "fromTime".
//
// GCS file listing is eventually consistent, so it's possible that we are missing a file. The
// metadata of a file is sufficient to identify the preceding file, so if we start from the
// last file and work backwards we can verify that we have no holes in our chain (although we
// may be missing files at the end).
TreeMap<DateTime, GcsFileMetadata> sequence = new TreeMap<>();
logger.atInfo().log("Restoring until: %s", lastUpperBoundTime);
boolean inconsistentFileSet = !constructDiffSequence(
upperBoundTimesToMetadata, fromTime, lastUpperBoundTime, sequence);
// Verify that all of the elements in the original set are represented in the sequence. If we
// find anything that's not represented, construct a sequence for it.
boolean checkForMoreExtraDiffs = true; // Always loop at least once.
while (checkForMoreExtraDiffs) {
checkForMoreExtraDiffs = false;
// Walk newest-to-oldest so the first unrepresented key seen is the newest orphan.
for (DateTime key : upperBoundTimesToMetadata.descendingKeySet()) {
if (!isInRange(key, fromTime, toTime)) {
break;
}
if (!sequence.containsKey(key)) {
// Found an orphaned file: chain backwards from it, then rescan from the top.
constructDiffSequence(upperBoundTimesToMetadata, fromTime, key, sequence);
checkForMoreExtraDiffs = true;
inconsistentFileSet = true;
break;
}
}
}
checkState(
!inconsistentFileSet,
"Unable to compute commit diff history, there are either gaps or forks in the history "
+ "file set.  Check log for details.");
logger.atInfo().log(
"Actual restore from time: %s", getLowerBoundTime(sequence.firstEntry().getValue()));
logger.atInfo().log("Found %d files to restore", sequence.size());
return ImmutableList.copyOf(sequence.values());
}