org.apache.commons.lang3.tuple.Pair#getRight() Source Code Examples

The following are example usages of org.apache.commons.lang3.tuple.Pair#getRight(), taken from open-source projects; follow each project link to view the full source on GitHub.

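Before the project examples, here is a minimal standalone sketch of the API under discussion: Pair.of(...) builds an immutable pair, and getLeft()/getRight() return its two components. Only the commons-lang3 calls shown (Pair.of, getLeft, getRight) are the real library API; the class name PairGetRightDemo and the sample values are placeholders for illustration.

import org.apache.commons.lang3.tuple.Pair;

public class PairGetRightDemo {
    public static void main(String[] args) {
        // Pair.of creates an ImmutablePair; getLeft()/getRight() return the stored components.
        Pair<String, Integer> nameAndCount = Pair.of("tickets", 11);
        String label = nameAndCount.getLeft();   // "tickets"
        int count = nameAndCount.getRight();     // 11
        System.out.println(label + " = " + count);

        // getRight() returns exactly what was stored, so it may be null;
        // callers typically null-check before using the value.
        Pair<String, String> category = Pair.of("Emissions to air", null);
        System.out.println(category.getRight()); // prints "null"
    }
}
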
Example 1  Project: olca-app  File: InventoryPage.java
private String getFlowColumnText(IndexFlow f, int col) {
	if (f.flow == null)
		return null;
	Pair<String, String> category = Labels.getCategory(f.flow);
	switch (col) {
	case 0:
		return Labels.name(f);
	case 1:
		return category.getLeft();
	case 2:
		return category.getRight();
	case 3:
		return Numbers.format(getAmount(f));
	case 4:
		return Labels.refUnit(f);
	default:
		return null;
	}
}
 
Example 2  Project: alf.io  File: EventManagerIntegrationTest.java
@Test
public void testIncreaseRestrictedCategory() {
    List<TicketCategoryModification> categories = Collections.singletonList(
        new TicketCategoryModification(null, "default", 10,
            new DateTimeModification(LocalDate.now(), LocalTime.now()),
            new DateTimeModification(LocalDate.now(), LocalTime.now()),
            DESCRIPTION, BigDecimal.TEN, true, "", true, null, null, null, null, null, 0, null, null, AlfioMetadata.empty()));
    Pair<Event, String> pair = initEvent(categories, organizationRepository, userManager, eventManager, eventRepository);

    Event event = pair.getLeft();
    String username = pair.getRight();

    TicketCategory category = ticketCategoryRepository.findAllTicketCategories(event.getId()).get(0);
    Map<String, String> categoryDescription = ticketCategoryDescriptionRepository.descriptionForTicketCategory(category.getId());

    TicketCategoryModification tcm = new TicketCategoryModification(category.getId(), category.getName(), 11,
        DateTimeModification.fromZonedDateTime(category.getUtcInception()),
        DateTimeModification.fromZonedDateTime(category.getUtcExpiration()),
        categoryDescription, category.getPrice(), true, "", true, null, null, null, null, null, 0, null, null, AlfioMetadata.empty());
    Result<TicketCategory> result = eventManager.updateCategory(category.getId(), event, tcm, username);
    assertTrue(result.isSuccess());
    assertEquals(11, ticketRepository.countFreeTickets(event.getId(), category.getId()).intValue());
}
 
Example 3  Project: gatk-protected  File: GCCorrectorUnitTest.java
@Test
public void testGCCorrection() {
    final int numSamples = 5;
    final int numTargets = 10000;

    final Pair<ReadCountCollection, double[]> data = GCBiasSimulatedData.simulatedData(numTargets, numSamples);
    final ReadCountCollection rcc1 = data.getLeft();
    final double[] gcContentByTarget = data.getRight();
    final ReadCountCollection correctedCounts1 = GCCorrector.correctCoverage(rcc1, gcContentByTarget);
    final double[] correctedNoiseBySample = GATKProtectedMathUtils.columnStdDevs(correctedCounts1.counts());
    Arrays.stream(correctedNoiseBySample).forEach(x -> Assert.assertTrue(x < 0.02));

    //check that GC correction is approximately idempotent -- if you correct again, very little should happen
    final ReadCountCollection correctedCounts2 = GCCorrector.correctCoverage(correctedCounts1, gcContentByTarget);
    final double change1 = correctedCounts1.counts().subtract(rcc1.counts()).getFrobeniusNorm();
    final double change2 = correctedCounts2.counts().subtract(correctedCounts1.counts()).getFrobeniusNorm();
    Assert.assertTrue(change2 < change1 / 10);
}
 
Example 4
@Test
public void incrementalMessagesMustBeSentToClientsIfThereAreNoGaps() throws Exception {
    int lastMsgSeqNumProcessed = 1000;
    sendInitialMBOSnapshot(lastMsgSeqNumProcessed);
    final MdpFeedContext incrementContext = new MdpFeedContext(Feed.A, FeedType.I);
    final MdpPacket mdpPacketWithIncrement = MdpPacket.instance();
    for(int i = lastMsgSeqNumProcessed +1; i < lastMsgSeqNumProcessed + 10; i++) {
        ByteBuffer mboSnapshotTestMessage = ModelUtils.getMBOIncrementTestMessage(i);
        mdpPacketWithIncrement.wrapFromBuffer(mboSnapshotTestMessage);
        gapChannelController.handleIncrementalPacket(incrementContext, mdpPacketWithIncrement);
        Pair<MdpFeedContext, MdpPacket> incrementPair = testChannelController.nextIncrementalMessage();
        assertNotNull(incrementPair);
        MdpPacket mdpPacket = incrementPair.getRight();
        assertEquals(i, mdpPacket.getMsgSeqNum());
    }
}
 
Example 5  Project: geowave  File: GeoServerIT.java
public boolean updatePoint(final String lockID) throws Exception {
  final Pair<CloseableHttpClient, HttpClientContext> clientAndContext = createClientAndContext();
  final CloseableHttpClient httpclient = clientAndContext.getLeft();
  final HttpClientContext context = clientAndContext.getRight();
  try {
    final HttpPost command = createWFSTransaction(httpclient, "1.1.0");
    command.setEntity(new StringEntity(update));
    final LinkedList<HttpResponse> capturedResponse = new LinkedList<>();
    run(new Runnable() {
      @Override
      public void run() {
        try {
          capturedResponse.add(httpclient.execute(command, context));
        } catch (final Exception e) {
          throw new RuntimeException("update point client failed", e);
        }
      }
    }, 500000);

    final HttpResponse r = capturedResponse.getFirst();

    return r.getStatusLine().getStatusCode() == Status.OK.getStatusCode();
  } finally {
    httpclient.close();
  }
}
 
Example 6  Project: gatk  File: FuncotatorIntegrationTest.java
private void assertEqualVariantFiles(final File outputFile, final String eColiExpectedOut) {
    // Get the actual data:
    final Pair<VCFHeader, List<VariantContext>> actualVcfInfo               = VariantContextTestUtils.readEntireVCFIntoMemory(outputFile.getAbsolutePath());
    final List<VariantContext>                  actualVariantContexts       = actualVcfInfo.getRight();
    final VCFHeader                             actualVcfHeader             = actualVcfInfo.getLeft();
    final VCFInfoHeaderLine                     actualFuncotationHeaderLine = actualVcfHeader.getInfoHeaderLine(VcfOutputRenderer.FUNCOTATOR_VCF_FIELD_NAME);

    // Get the expected data:
    final Pair<VCFHeader, List<VariantContext>> expectedVcfInfo               = VariantContextTestUtils.readEntireVCFIntoMemory(new File(eColiExpectedOut).getAbsolutePath());
    final List<VariantContext>                  expectedVariantContexts       = expectedVcfInfo.getRight();
    final VCFHeader                             expectedVcfHeader             = expectedVcfInfo.getLeft();
    final VCFInfoHeaderLine                     expectedFuncotationHeaderLine = expectedVcfHeader.getInfoHeaderLine(VcfOutputRenderer.FUNCOTATOR_VCF_FIELD_NAME);

    // Check that they're equal:
    Assert.assertEquals(actualFuncotationHeaderLine, expectedFuncotationHeaderLine);
    VariantContextTestUtils.assertEqualVariants(actualVariantContexts, expectedVariantContexts);
}
 
Example 7  Project: gatk-protected  File: GCBiasSimulatedData.java
/**
 *
 * @param readCountsFile    A simulated read counts file with GC bias effects
 * @param targetsFile       A simulated targets file with GC content annotation
 */
public static void makeGCBiasInputFiles(final Pair<ReadCountCollection, double[]> data,
                                        final File readCountsFile, final File targetsFile) throws IOException {
    final ReadCountCollection inputCounts = data.getLeft();
    final double[] gcContentByTarget = data.getRight();
    ReadCountCollectionUtils.write(readCountsFile, inputCounts);

    final TargetWriter writer = new TargetWriter(targetsFile, Collections.singleton(TargetAnnotation.GC_CONTENT));
    for (int i = 0; i < gcContentByTarget.length; i++) {
        final Target unannotatedTarget = inputCounts.records().get(i).getTarget();
        final TargetAnnotationCollection annotations = new TargetAnnotationCollection();
        annotations.put(TargetAnnotation.GC_CONTENT, Double.toString(gcContentByTarget[i]));
        final Target target = new Target(unannotatedTarget.getName(), unannotatedTarget.getInterval(), annotations);
        writer.writeRecord(target);
    }
    writer.close();
}
 
Example 8  Project: saros  File: AnnotationManagerTest.java
/**
 * Mocks the addition and removal of range highlighters for the given ranges to the given editor
 * and file with the given text attributes.
 *
 * <p>When using this call, it is also advised to call {@link PowerMock#verify(Object...)} on
 * <code>AbstractEditorAnnotation.class</code> to ensure that the {@link
 * IExpectationSetters#atLeastOnce()} restriction is met.
 *
 * <p>{@link #prepareMockAddRemoveRangeHighlighters()} must be called before the first call to
 * this method, and {@link #replayMockAddRemoveRangeHighlighters()} must be called after the last
 * call to this method to replay the added mocking logic.
 *
 * @param ranges the ranges whose highlighter addition and removal to mock
 * @param file the file of the annotation
 * @param editor the editor of the annotation
 * @param textAttributes the text attributes of the annotation
 * @throws Exception see {@link PowerMock#expectPrivate(Object, Method, Object...)}
 */
private void mockAddRemoveRangeHighlighters(
    List<Pair<Integer, Integer>> ranges, IFile file, Editor editor, TextAttributes textAttributes)
    throws Exception {

  for (Pair<Integer, Integer> range : ranges) {
    int rangeStart = range.getLeft();
    int rangeEnd = range.getRight();

    RangeHighlighter rangeHighlighter = mockRangeHighlighter(rangeStart, rangeEnd);

    PowerMock.expectPrivate(
            AbstractEditorAnnotation.class,
            "addRangeHighlighter",
            rangeStart,
            rangeEnd,
            editor,
            textAttributes,
            file)
        .andStubReturn(rangeHighlighter);

    PowerMock.expectPrivate(
            AbstractEditorAnnotation.class, "removeRangeHighlighter", editor, rangeHighlighter)
        .atLeastOnce()
        .asStub();
  }
}
 
Example 9  Project: wpcleaner  File: PageElementISSNConfiguration.java
/**
 * Tell if a template should be ignored for ISSN.
 * 
 * @param template Template to be checked.
 * @return True if the template should be ignored.
 */
public boolean shouldIgnoreTemplate(PageElementTemplate template) {

  // Check parameters
  if (template == null) {
    return false;
  }

  // Check if the template should be ignored
  Map<String, List<Pair<String, String>>> ignoreTemplates = version.getIgnoreTemplates();
  String templateName = Page.normalizeTitle(template.getTemplateName());
  List<Pair<String, String>> listParams = ignoreTemplates.get(templateName);
  if (listParams == null) {
    return false;
  }
  for (Pair<String, String> param : listParams) {
    if (param.getLeft() != null) {
      String paramValue = template.getParameterValue(param.getLeft());
      if (param.getRight() != null) {
        if ((paramValue != null) &&
            (paramValue.trim().equals(param.getRight()))) {
          return true; // Ignore all templates with this name and parameter set to a given value
        }
      } else {
        if (paramValue != null) {
          return true; // Ignore all templates with this name and parameter present
        }
      }
    } else {
      return true; // Ignore all templates with this name
    }
  }

  return false;
}
 
Example 10  Project: owltools  File: BuilderTools.java
public static String buildTaxonString(Pair<String, String> taxonRelPair) {
	if (taxonRelPair != null) {
		String taxId = "taxon:"+removePrefix(taxonRelPair.getLeft(), ':');
		String rel = taxonRelPair.getRight();
		if (rel != null) {
			return rel+"("+taxId+")";
		}
		return taxId;
	}
	return null;
}
 
Example 11  Project: jopenfst  File: Determinize.java
private void expandDeferredFinalStates(Deque<DetElement> finalQueue) {
  HashBiMap<Integer, GallicWeight> outputStateIdToFinalSuffix = HashBiMap.create();
  while (!finalQueue.isEmpty()) {
    DetElement element = finalQueue.removeFirst();
    for (GallicWeight gallicWeight : element.residual.getWeights()) {
      // factorization is like a simple version of the divisor/divide calculation earlier
      Pair<GallicWeight, GallicWeight> factorized = gallicSemiring.factorize(gallicWeight);
      GallicWeight prefix = factorized.getLeft();
      GallicWeight suffix = factorized.getRight();
      if (!outputStateIdToFinalSuffix.inverse().containsKey(suffix)) {
        // we don't have a synthetic state for this suffix yet
        MutableState newOutputState = outputFst.newState();
        outputStateIdToFinalSuffix.put(newOutputState.getId(), suffix);
        if (suffix.getLabels().isEmpty()) {
          // this suffix is a real final state, and there's no more work to do
          newOutputState.setFinalWeight(suffix.getWeight());
        } else {
          // this suffix still has more labels to emit, so leave final weight as zero and enqueue for expansion
          finalQueue.addLast(new DetElement(newOutputState.getId(), suffix));
        }
      }
      Integer outputStateId = outputStateIdToFinalSuffix.inverse().get(suffix);
      MutableState nextState = checkNotNull(outputFst.getState(outputStateId), "state should exist", outputStateId);
      MutableState thisState = checkNotNull(outputFst.getState(element.inputStateId));
      Preconditions.checkArgument(prefix.getLabels().size() == 1, "prefix size should be 1", prefix);
      int oLabel = prefix.getLabels().get(0);
      // note that openfst has an 'increment subsequent epsilons' feature so that these paths can still be
      // guaranteed to be deterministic (with just multiple definitions of <EPS>); this feature would go here
      // if we decide to implement it in the future.
      outputFst.addArc(thisState, this.outputEps, oLabel, nextState, prefix.getWeight());
    }
  }
}
 
Example 12  Project: quaerite  File: ExperimentFactory.java
public Pair<Experiment, Experiment> crossover(Experiment parentA, Experiment parentB) {
    StringFeatureFactory featureFactory = (StringFeatureFactory)featureFactories.get(SEARCH_SERVER_URLS);
    Pair<URL, URL> urls = featureFactory.crossover(
            new URL(parentA.getSearchServerUrl()),
            new URL(parentB.getSearchServerUrl()));
    Pair<CustomHandler, CustomHandler> customHandlers = Pair.of(null, null);
    if (featureFactories.get(CustomHandlerFactory.NAME) != null) {
        customHandlers = featureFactories.get(CustomHandlerFactory.NAME)
                .crossover(parentA.getCustomHandler(), parentB.getCustomHandler());
    }

    QueryFactory queryFactory = (QueryFactory)featureFactories.get(
            QueryFactory.NAME);

    Pair<Query, Query> queries = queryFactory.crossover(parentA.getQuery(), parentB.getQuery());

    URL urlA = (MathUtil.RANDOM.nextFloat() <= 0.5) ? urls.getLeft()
            : urls.getRight();
    CustomHandler customHandlerA = (MathUtil.RANDOM.nextFloat() <= 0.5) ?
            customHandlers.getLeft() : customHandlers.getRight();
    Query queryA = (MathUtil.RANDOM.nextFloat() <= 0.5) ?
            queries.getLeft() : queries.getRight();
    Experiment childA = new Experiment("childA", urlA.toString(),
            customHandlerA, queryA);

    URL urlB = (MathUtil.RANDOM.nextFloat() <= 0.5) ? urls.getLeft() : urls.getRight();
    CustomHandler customHandlerB = (MathUtil.RANDOM.nextFloat() <= 0.5) ?
            customHandlers.getLeft() : customHandlers.getRight();
    Query queryB = (MathUtil.RANDOM.nextFloat() <= 0.5) ?
            queries.getLeft() : queries.getRight();
    Experiment childB = new Experiment("childB", urlB.toString(),
            customHandlerB, queryB);
    addFilterQueries(childA);
    addFilterQueries(childB);
    return Pair.of(childA, childB);

}
 
Example 13  Project: geowave  File: RowRangeHistogramStatistics.java
@Override
public InternalDataStatistics<T, NumericHistogram, PartitionStatisticsQueryBuilder<NumericHistogram>> duplicate() {
  final Pair<String, byte[]> pair =
      PartitionStatisticsQueryBuilder.decomposeIndexAndPartitionFromId(extendedId);
  return new RowRangeHistogramStatistics<>(
      adapterId,
      pair.getLeft(), // indexName
      pair.getRight());
}
 
Example 14
/**
 * Generates a complete category GraphQL query with a selection of the given category identifier.
 *
 * @param identifier Category identifier, usually the category id
 * @return GraphQL query as string
 */
public String generateQuery(String identifier) {
    Pair<QueryQuery.CategoryArgumentsDefinition, CategoryTreeQueryDefinition> args = generateQueryArgs(identifier);
    QueryQuery.CategoryArgumentsDefinition searchArgs = args.getLeft();

    CategoryTreeQueryDefinition queryArgs = args.getRight();

    return Operations.query(query -> query
        .category(searchArgs, queryArgs)).toString();
}
 
Example 15  Project: java  File: QuantityFormatter.java
private String toBase10String(final Quantity quantity) {
    final BigDecimal amount = quantity.getNumber();
    final long value = amount.unscaledValue().longValue();
    final int exponent = -amount.scale();
    final Pair<Long, Integer> resultAndTimes = removeFactorsForBase(value, 10);
    final int postFactoringExponent = exponent + resultAndTimes.getRight();
    final Pair<Long, Integer> valueAndExponent = ensureExponentIsMultipleOf3(resultAndTimes.getLeft(), postFactoringExponent);
    return valueAndExponent.getLeft() + new SuffixFormatter().format(quantity.getFormat(), valueAndExponent.getRight());
}
 
Example 16  Project: gatk  File: ReadUtils.java
public static Optional<Byte> getReadBaseQualityAtReferenceCoordinate(final GATKRead read, final int refCoord) {
    if (refCoord < read.getStart() || read.getEnd() < refCoord) {
        return Optional.empty();
    }
    final Pair<Integer, CigarOperator> offsetAndOperator = getReadIndexForReferenceCoordinate(read.getSoftStart(), read.getCigar(), refCoord);
    return (offsetAndOperator.getRight() != null && offsetAndOperator.getRight().consumesReadBases()) ?
            Optional.of(read.getBaseQuality(offsetAndOperator.getLeft())) : Optional.empty();
}
 
Example 17  Project: pravega  File: SegmentWithRange.java
public static Range fromPair(Pair<Double, Double> pair) {
    return new Range(pair.getLeft(), pair.getRight()); 
}
 
Example 18  Project: fdb-record-layer  File: TextScan.java
@Nullable
private static Boolean entriesContainAllWithin(@Nonnull List<IndexEntry> entries, int maxDistance) {
    if (entries.isEmpty()) {
        return null;
    }
    List<List<Integer>> positionLists = getPositionsLists(entries);
    if (positionLists.stream().anyMatch(List::isEmpty)) {
        // Remove any empty lists. They indicate that the token is so prevalent
        // that the position list information is not retained.
        positionLists = positionLists.stream().filter(list -> !list.isEmpty()).collect(Collectors.toList());
        if (positionLists.isEmpty()) {
            // If they are all empty, then we assume that they were all close.
            return Boolean.TRUE;
        }
    }

    PriorityQueue<Pair<Integer, Iterator<Integer>>> minQueue = new PriorityQueue<>(positionLists.size(), Comparator.comparingInt(Pair::getLeft));
    int max = Integer.MIN_VALUE;
    for (List<Integer> positionList : positionLists) {
        Iterator<Integer> positionIterator = positionList.iterator();
        int value = positionIterator.next();
        max = Math.max(max, value);
        minQueue.add(Pair.of(value, positionIterator));
    }

    while (true) {
        // Pop the smallest position off of the queue and check to see
        // if it is within maxDistance of the current largest value.
        Pair<Integer, Iterator<Integer>> minElem = minQueue.poll();
        int min = minElem.getLeft();
        if (max - min <= maxDistance) {
            // Current span is within maximum allowed. Return true.
            return Boolean.TRUE;
        }
        Iterator<Integer> minIterator = minElem.getRight();
        if (minIterator.hasNext()) {
            // Advance this iterator and place it back in the queue with the
            // new associated value.
            int nextValue = minIterator.next();
            max = Math.max(max, nextValue);
            minQueue.add(Pair.of(nextValue, minIterator));
        } else {
            // Exhausted one of the position lists. We didn't find a span that
            // was less than or equal to the maximum allowed span.
            break;
        }
    }
    return Boolean.FALSE;
}
 
Example 19  Project: pulsar  File: WatermarkCountEvictionPolicy.java
@Override
public void restoreState(Pair<Long, Long> state) {
    currentCount.set(state.getLeft());
    processed = state.getRight();
}
 
Example 20  Project: gatk  File: InbreedingCoeffUnitTest.java
@Test
public void testInbreedingCoeffForMultiallelicVC() {
    //make sure that compound hets (with no ref) don't add to het count
    VariantContext test1 = makeVC("1", Arrays.asList(Aref, T, C),
            makeG("s1",Aref,T,2530,0,7099,366,3056,14931),
            makeG("s2",T,T,7099,2530,0,7099,366,3056,14931),
            makeG("s3",T,C,7099,2530,7099,3056,0,14931),
            makeG("s4",Aref,T,2530,0,7099,366,3056,14931),
            makeG("s5",T,T,7099,2530,0,7099,366,3056,14931),
            makeG("s6",Aref,T,2530,0,7099,366,3056,14931),
            makeG("s7",T,T,7099,2530,0,7099,366,3056,14931),
            makeG("s8",Aref,T,2530,0,7099,366,3056,14931),
            makeG("s9",T,T,7099,2530,0,7099,366,3056,14931),
            makeG("s10",Aref,T,2530,0,7099,366,3056,14931));

    final Pair<Integer, Double> pair1 = InbreedingCoeff.calculateIC(test1, test1.getGenotypes());
    final int count1 = pair1.getLeft();
    final double ICresult1 = pair1.getRight();
    Assert.assertEquals(count1, 10, "count1");
    Assert.assertEquals(ICresult1, -0.3333333, DELTA_PRECISION, "Pass");

    //make sure that hets with different alternate alleles all get counted
    VariantContext test2 = makeVC("2", Arrays.asList(Aref, T, C),
        makeG("s1",Aref,C,4878,1623,11297,0,7970,8847),
        makeG("s2",Aref,T,2530,0,7099,366,3056,14931),
        makeG("s3",Aref,T,3382,0,6364,1817,5867,12246),
        makeG("s4",Aref,T,2488,0,9110,3131,9374,12505),
        makeG("s5",Aref,C,4530,2006,18875,0,6847,23949),
        makeG("s6",Aref,T,5325,0,18692,389,16014,24570),
        makeG("s7",Aref,T,2936,0,29743,499,21979,38630),
        makeG("s8",Aref,T,6902,0,8976,45,5844,9061),
        makeG("s9",Aref,T,5732,0,10876,6394,11408,17802),
        makeG("s10",Aref,T,2780,0,25045,824,23330,30939));

    final Pair<Integer, Double> pair2 = InbreedingCoeff.calculateIC(test2, test2.getGenotypes());
    final int count2 = pair2.getLeft();
    final double ICresult2 = pair2.getRight();
    Assert.assertEquals(ICresult2, -1.0, DELTA_PRECISION, "Pass");
    Assert.assertEquals(count2, 10, "count2");

    //test the annotate method
    final Map<String, Object> annots = new InbreedingCoeff().annotate(null, test2, null);
    Assert.assertEquals(annots.keySet(), Collections.singleton(GATKVCFConstants.INBREEDING_COEFFICIENT_KEY), "annots");
    Assert.assertEquals(annots.values().size(), 1, "size");
    Assert.assertEquals(Double.parseDouble((String)annots.values().iterator().next()), -1.0, DELTA_PRECISION, "ic");

    final Map<String, Object> annots3 = new InbreedingCoeff(Collections.singleton("s1")).annotate(null, test2, null);
    Assert.assertTrue(annots3.isEmpty());//not enough samples

    final Map<String, Object> annots4 = new InbreedingCoeff(new LinkedHashSet<>(Arrays.asList("s1", "s2", "s3", "s4", "s5", "s6", "s7", "s8", "s9"))).annotate(null, test2, null);
    Assert.assertTrue(annots4.isEmpty());//not enough samples
}