com.google.common.collect.Maps#newTreeMap() Source Code Examples

Listed below are example usages of com.google.common.collect.Maps#newTreeMap() drawn from open-source projects; follow each project link to view the full source on GitHub.
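Before the project examples, here is a minimal standalone sketch of the method itself: Maps.newTreeMap() simply returns an empty java.util.TreeMap sorted by the keys' natural ordering (the class name and data below are illustrative only).

import java.util.TreeMap;

import com.google.common.collect.Maps;

public class NewTreeMapDemo {
	public static void main(String[] args) {
		// Equivalent to new TreeMap<String, Integer>(), with both type
		// parameters inferred from the assignment target.
		TreeMap<String, Integer> scores = Maps.newTreeMap();
		scores.put("charlie", 3);
		scores.put("alpha", 1);
		scores.put("bravo", 2);
		// Keys iterate in natural (lexicographic) order.
		System.out.println(scores); // {alpha=1, bravo=2, charlie=3}
	}
}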

Example 1
@Override
public final void prepare(final Map map, final TopologyContext topologyContext,
                          final OutputCollector outputCollector) {
	afinnSentimentMap = Maps.newTreeMap();
	stateSentimentMap = Maps.newTreeMap();
	this._outputCollector = outputCollector;

	// The bolt reads the AFINN sentiment file (which is on the classpath) and stores its key/value pairs in a map.
	try {
		final URL url = Resources.getResource(Constants.AFINN_SENTIMENT_FILE_NAME);
		final String text = Resources.toString(url, Charsets.UTF_8);
		final Iterable<String> lineSplit = Splitter.on("\n").trimResults().omitEmptyStrings().split(text);
		List<String> tabSplit;
		for (final String str: lineSplit) {
			tabSplit = Lists.newArrayList(Splitter.on("\t").trimResults().omitEmptyStrings().split(str));
			afinnSentimentMap.put(tabSplit.get(0), Integer.parseInt(tabSplit.get(1)));
		}
	} catch (final IOException ioException) {
		LOGGER.error(ioException.getMessage(), ioException);
		ioException.printStackTrace();
		// Should not occur; if it does, we can't continue, so exit immediately.
		System.exit(1);
	}
}
 
Example 2 (Project: codemining-core, File: CDTTokenizer.java)
@Override
public SortedMap<Integer, String> tokenListWithPos(final char[] code) {
	final SortedMap<Integer, String> tokens = Maps.newTreeMap();
	tokens.put(-1, SENTENCE_START);
	tokens.put(Integer.MAX_VALUE, SENTENCE_END);

	final Scanner scanner = new Scanner();
	scanner.setSource(code);
	do {
		final int token = scanner.getNextToken();
		if (token == Token.tWHITESPACE) {
			continue;
		}
		final String nxtToken = new String(scanner.getCurrentTokenSource());
		tokens.put(scanner.getCurrentPosition(), nxtToken);
	} while (!scanner.atEnd());
	return tokens;
}
 
Example 3 (Project: phoenix-tephra, File: SnapshotCodecV2.java)
@Override
protected NavigableMap<Long, TransactionManager.InProgressTx> decodeInProgress(BinaryDecoder decoder)
  throws IOException {

  int size = decoder.readInt();
  NavigableMap<Long, TransactionManager.InProgressTx> inProgress = Maps.newTreeMap();
  while (size != 0) { // zero denotes end of list as per AVRO spec
    for (int remaining = size; remaining > 0; --remaining) {
      long txId = decoder.readLong();
      long expiration = decoder.readLong();
      long visibilityUpperBound = decoder.readLong();
      int txTypeIdx = decoder.readInt();
      TransactionManager.InProgressType txType;
      try {
        txType = TransactionManager.InProgressType.values()[txTypeIdx];
      } catch (ArrayIndexOutOfBoundsException e) {
        throw new IOException("Type enum ordinal value is out of range: " + txTypeIdx);
      }
      inProgress.put(txId,
                     new TransactionManager.InProgressTx(visibilityUpperBound, expiration, txType,
                         new LongArrayList()));
    }
    size = decoder.readInt();
  }
  return inProgress;
}
 
Example 4 (Project: batfish, File: EnvironmentTest.java)
@Test
public void testToString() {
  Environment e =
      new Environment(
          "testrig",
          Sets.newTreeSet(),
          Sets.newTreeSet(),
          Sets.newTreeSet(),
          Maps.newTreeMap(),
          Maps.newTreeMap(),
          Sets.newTreeSet());
  assertThat(
      e.toString(),
      equalTo(
          "Environment{testrigName=testrig, "
              + "edgeBlacklist=[], interfaceBlacklist=[], "
              + "nodeBlacklist=[], bgpTables={}, routingTables={}, "
              + "externalBgpAnnouncements=[]}"));
}
 
Example 5 (Project: monasca-thresh, File: AlarmSqlImpl.java)
private byte[] calculateDimensionSHA1(final Map<String, String> dimensions) {
  // Calculate dimensions sha1 hash id.
  final StringBuilder dimensionIdStringToHash = new StringBuilder("");
  if (dimensions != null && !dimensions.isEmpty()) {
    // Sort the dimensions on name and value.
    final Map<String, String> dimensionTreeMap = Maps.newTreeMap(ImmutableSortedMap.copyOf(dimensions));
    for (final String dimensionName : dimensionTreeMap.keySet()) {
      if (dimensionName != null && !dimensionName.isEmpty()) {
        final String dimensionValue = dimensionTreeMap.get(dimensionName);
        if (dimensionValue != null && !dimensionValue.isEmpty()) {
          dimensionIdStringToHash
              .append(this.truncateString(dimensionName, MAX_COLUMN_LENGTH))
              .append(this.truncateString(dimensionValue, MAX_COLUMN_LENGTH));
        }
      }
    }
  }
  return DigestUtils.sha(dimensionIdStringToHash.toString());
}
 
Example 6 (Project: PoseidonX, File: CreateDatasourceAnalyzer.java)
private void parseStreamProperties()
{
    TreeMap<String, String> properties = Maps.newTreeMap();

    StreamPropertiesContext propertiesContext = context.getDataSourceProperties();
    if(propertiesContext == null)
    {
        analyzeContext.setDatasourceConfigs(properties);
        return;
    }

    List<KeyValuePropertyContext> propertyList = propertiesContext.getProperties();

    for (KeyValuePropertyContext ctx : propertyList)
    {
        String key = ctx.getKey();
        String value = ctx.getValue();
        properties.put(key, value);
    }
    
    analyzeContext.setDatasourceConfigs(properties);
}
 
Example 7 (Project: api-mining, File: TokenTypeTokenizer.java)
@Override
public SortedMap<Integer, FullToken> fullTokenListWithPos(final char[] code) {
	final Iterable<Token> tokens = lexer.getTokens(new String(code));
	final SortedMap<Integer, FullToken> tokensWithPos = Maps.newTreeMap();
	tokensWithPos.put(-1, new FullToken(SENTENCE_START, SENTENCE_START));
	tokensWithPos.put(Integer.MAX_VALUE, new FullToken(SENTENCE_END,
			SENTENCE_END));
	for (final Token tok : tokens) {
		if (isProgramToken(tok)) {
			continue;
		}
		tokensWithPos.put(tok.getPos(), new FullToken(getTokenString(tok),
				""));
	}
	return tokensWithPos;
}
 
Example 8 (Project: datawave, File: DiscoveryIteratorTest.java)
TreeMap<Key,Value> buildMap(String term, String date) {
    int nShards = 10;
    TreeMap<Key,Value> map = Maps.newTreeMap();
    String[] types = new String[] {"t1", "t2", "t3"};
    Value value = new Value(makeUidList(24).toByteArray());
    for (String type : types) {
        for (int i = 0; i < nShards; i++) {
            map.put(new Key(term, "field", date + "_" + i + "\u0000" + type, "FOO"), value);
        }
    }
    return map;
}
 
Example 9 (Project: google-http-java-client, File: UriTemplateTest.java)
public void testExpandSeveralTemplates() {
  SortedMap<String, Object> map = Maps.newTreeMap();
  map.put("id", "a");
  map.put("uid", "b");

  assertEquals("?id=a&uid=b", UriTemplate.expand("{?id,uid}", map, false));
}
 
Example 10 (Project: codemining-core, File: JavaTokenizer.java)
@Override
public SortedMap<Integer, FullToken> fullTokenListWithPos(final char[] code) {
	// TODO Duplicate Code
	final PublicScanner scanner = prepareScanner();
	final SortedMap<Integer, FullToken> tokens = Maps.newTreeMap();
	tokens.put(-1, new FullToken(SENTENCE_START, SENTENCE_START));
	tokens.put(Integer.MAX_VALUE, new FullToken(SENTENCE_END, SENTENCE_END));
	scanner.setSource(code);
	while (!scanner.atEnd()) {
		do {
			try {
				final int token = scanner.getNextToken();
				if (token == ITerminalSymbols.TokenNameEOF) {
					break;
				}
				final String nxtToken = transformToken(token,
						scanner.getCurrentTokenString());
				final int position = scanner.getCurrentTokenStartPosition();
				tokens.put(position,
						new FullToken(nxtToken, Integer.toString(token)));
			} catch (final InvalidInputException e) {
				LOGGER.warning(ExceptionUtils.getFullStackTrace(e));
			}
		} while (!scanner.atEnd());

	}
	return tokens;
}
 
Example 11 (Project: soabase, File: DiscoveryResource.java)
@GET
@Path("deploymentGroups/{serviceName}")
@Produces(MediaType.APPLICATION_JSON)
public Response getDeploymentGroups(@PathParam("serviceName") String serviceName)
{
    Map<String, Boolean> groupStates = Maps.newTreeMap();
    for ( String group : features.getDeploymentGroupManager().getKnownGroups(serviceName) )
    {
        groupStates.put(group, features.getDeploymentGroupManager().isGroupEnabled(serviceName, group));
    }
    GenericEntity<Map<String, Boolean>> entity = new GenericEntity<Map<String, Boolean>>(groupStates){};
    return Response.ok(entity).build();
}
 
Example 12 (Project: bluima, File: MyInput2RegexTokensTest.java)
/**
 * @param input
 *            text to annotate
 * @param expected
 *            expected tokens, in lexical order
 */
private void rassert(String input, String... expected) {

    TreeMap<String, ?> gold = Maps.newTreeMap();
    for (String exp : expected) {
        gold.put(exp, null);
    }

    TreeMap<String, ?> system = Maps.newTreeMap();
    List<Token> tokens = r.addRegexes(input);
    for (Token token : tokens) {
        system.put(token.toString(), null);
    }
    assertEquals(join(gold.keySet(), "___"), join(system.keySet(), "___"));
}
 
Example 13 (Project: Rhombus, File: ObjectMapperQueryITCase.java)
@Test
public void testGetInEmptyShardList() throws Exception {
	//Build the connection manager
	ConnectionManager cm = getConnectionManager();
	cm.setLogCql(true);

	//Build our keyspace definition object
	CKeyspaceDefinition definition = JsonUtil.objectFromJsonResource(CKeyspaceDefinition.class, this.getClass().getClassLoader(), "ShardedKeyspace.js");

	//Rebuild the keyspace and get the object mapper
	cm.buildKeyspace(definition, true);
	cm.setDefaultKeyspace(definition);
	ObjectMapper om = cm.getObjectMapper();
	om.setLogCql(true);

	// Get 50 back
	long limit = 50l;
	Criteria criteria = new Criteria();
	SortedMap<String, Object> indexKeys = Maps.newTreeMap();
	indexKeys.put("account_id", accountId);
	indexKeys.put("user_id", userId);
	criteria.setIndexKeys(indexKeys);
	criteria.setLimit(limit);
	List<Map<String, Object>> results = om.list(objectType, criteria);

	assertEquals(0, results.size());

	cm.teardown();
}
 
Example 14
@Override
public SortedMap<Integer, FullToken> fullTokenListWithPos(final char[] code) {
	final TokenizerImplementation tok = new TokenizerImplementation();
	final SortedMap<Integer, WhitespaceAnnotatedToken> annotatedTokens = tok
			.tokenListWithPosAndWidth(code);
	final SortedMap<Integer, FullToken> tokens = Maps.newTreeMap();

	for (final Entry<Integer, WhitespaceAnnotatedToken> entry : annotatedTokens
			.entrySet()) {
		tokens.put(entry.getKey(), new FullToken(
				annotatedTokenToString(entry.getValue()),
				entry.getValue().tokenType));
	}
	return tokens;
}
 
Example 15
private void runFilteringTest(TxFilterFactory txFilterFactory,
                              List<Filter.ReturnCode> assertCodes) throws Exception {
  /*
   * Start and stop some transactions.  This will give us a transaction state something like the following
   * (numbers only reflect ordering, not actual transaction IDs):
   *   6  - in progress
   *   5  - committed
   *   4  - invalid
   *   3  - in-progress
   *   2  - committed
   *   1  - committed
   *
   *   read ptr = 5
   *   write ptr = 6
   */

  Transaction tx1 = txManager.startShort();
  txManager.canCommit(tx1.getTransactionId(), EMPTY_CHANGESET);
  txManager.commit(tx1.getTransactionId(), tx1.getWritePointer());

  Transaction tx2 = txManager.startShort();
  txManager.canCommit(tx2.getTransactionId(), EMPTY_CHANGESET);
  txManager.commit(tx2.getTransactionId(), tx2.getWritePointer());

  Transaction tx3 = txManager.startShort();
  Transaction tx4 = txManager.startShort();
  txManager.invalidate(tx4.getTransactionId());

  Transaction tx5 = txManager.startShort();
  txManager.canCommit(tx5.getTransactionId(), EMPTY_CHANGESET);
  txManager.commit(tx5.getTransactionId(), tx5.getWritePointer());

  Transaction tx6 = txManager.startShort();

  Map<byte[], Long> ttls = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
  Filter filter = txFilterFactory.getTxFilter(tx6, ttls);

  assertEquals(assertCodes.get(5),
               filter.filterKeyValue(newKeyValue("row1", "val1", tx6.getTransactionId())));
  assertEquals(assertCodes.get(4),
               filter.filterKeyValue(newKeyValue("row1", "val1", tx5.getTransactionId())));
  assertEquals(assertCodes.get(3),
               filter.filterKeyValue(newKeyValue("row1", "val1", tx4.getTransactionId())));
  assertEquals(assertCodes.get(2),
               filter.filterKeyValue(newKeyValue("row1", "val1", tx3.getTransactionId())));
  assertEquals(assertCodes.get(1),
               filter.filterKeyValue(newKeyValue("row1", "val1", tx2.getTransactionId())));
  assertEquals(assertCodes.get(0),
               filter.filterKeyValue(newKeyValue("row1", "val1", tx1.getTransactionId())));
}
 
Example 16
public static CodahaleMetricConventions makeConventions() {
  CodahaleMetricConventions conventions = new CodahaleMetricConventions();

  conventions.gaugeContextSuffix = "::Value";

  conventions.counterContextSuffix = "::Count";

  conventions.meterContextSuffixes = Maps.newTreeMap();
  conventions.meterContextSuffixes.put(
      MeterMetricType.COUNT, "::Count");
  conventions.meterContextSuffixes.put(
      MeterMetricType.MEAN_RATE_GAUGE, "::MeanRate");
  conventions.meterContextSuffixes.put(
      MeterMetricType.ONE_MIN_RATE_GAUGE, "::OneMinuteRate");
  conventions.meterContextSuffixes.put(
      MeterMetricType.FIVE_MIN_RATE_GAUGE, "::FiveMinuteRate");
  conventions.meterContextSuffixes.put(
      MeterMetricType.FIFTEEN_MIN_RATE_GAUGE, "::FifteenMinuteRate");

  conventions.timerContextSuffixes = Maps.newTreeMap();
  conventions.timerContextSuffixes.put(
      TimerMetricType.MIN, "::Min");
  conventions.timerContextSuffixes.put(
      TimerMetricType.MAX, "::Max");
  conventions.timerContextSuffixes.put(
      TimerMetricType.MEAN, "::Mean");
  conventions.timerContextSuffixes.put(
      TimerMetricType.STDDEV, "::StdDev");
  conventions.timerContextSuffixes.put(
      TimerMetricType.MEDIAN, "::50thPercentile");
  conventions.timerContextSuffixes.put(
      TimerMetricType.PERCENTILE_75, "::75thPercentile");
  conventions.timerContextSuffixes.put(
      TimerMetricType.PERCENTILE_99, "::99thPercentile");
  conventions.timerContextSuffixes.put(
      TimerMetricType.PERCENTILE_999, "::999thPercentile");
  conventions.timerContextSuffixes.put(
      TimerMetricType.COUNT, "::Count");
  conventions.timerContextSuffixes.put(
      TimerMetricType.ONE_MIN_RATE, "::OneMinuteRate");
  conventions.timerContextSuffixes.put(
      TimerMetricType.FIVE_MIN_RATE, "::FiveMinuteRate");
  conventions.timerContextSuffixes.put(
      TimerMetricType.FIFTEEN_MIN_RATE, "::FifteenMinuteRate");

  conventions.histogramContextSuffixes = Maps.newTreeMap();
  conventions.histogramContextSuffixes.put(
      HistogramMetricType.COUNT, "::Count");
  conventions.histogramContextSuffixes.put(
      HistogramMetricType.MIN, "::Min");
  conventions.histogramContextSuffixes.put(
      HistogramMetricType.MAX, "::Max");
  conventions.histogramContextSuffixes.put(
      HistogramMetricType.MEAN, "::Mean");
  conventions.histogramContextSuffixes.put(
      HistogramMetricType.STDDEV, "::StdDev");
  conventions.histogramContextSuffixes.put(
      HistogramMetricType.MEDIAN, "::50thPercentile");
  conventions.histogramContextSuffixes.put(
      HistogramMetricType.PERCENTILE_75, "::75thPercentile");
  conventions.histogramContextSuffixes.put(
      HistogramMetricType.PERCENTILE_99, "::99thPercentile");
  conventions.histogramContextSuffixes.put(
      HistogramMetricType.PERCENTILE_999, "::999thPercentile");

  return conventions;
}
 
Example 17 (Project: Rhombus, File: CQLExecutorIteratorTest.java)
public void testOneObject() throws Exception {

	//Get a connection manager based on the test properties
	ConnectionManagerTester cm = TestHelpers.getTestConnectionManager();
	cm.setLogCql(true);
	cm.buildCluster(true);

	CObjectShardList shardIdLists = new ShardListMock(Arrays.asList(1L,2L,3L,4L,5L));

	//Build our keyspace definition object
	CKeyspaceDefinition definition = JsonUtil.objectFromJsonResource(CKeyspaceDefinition.class, this.getClass().getClassLoader(), "MultiInsertKeyspace.js");

	//Rebuild the keyspace and get the object mapper
	cm.buildKeyspace(definition, true);

	ObjectMapper om = cm.getObjectMapper(definition);
	om.setLogCql(true);

	int nDataItems = 1;

	List<Map<String, Object>> values2 = generateNObjects(nDataItems);

	List<Map<String, Object>> updatedValues2 = Lists.newArrayList();
	for (Map<String, Object> baseValue : values2) {
		updatedValues2.add(JsonUtil.rhombusMapFromJsonMap(baseValue, definition.getDefinitions().get("object2")));
	}

	Map<String, List<Map<String, Object>>> multiInsertMap = Maps.newHashMap();
	multiInsertMap.put("object2", updatedValues2);

	//Insert data
	om.insertBatchMixed(multiInsertMap);

	// generate an executorIterator
	SortedMap<String, Object> indexValues = Maps.newTreeMap();
	indexValues.put("account_id", UUID.fromString("00000003-0000-0030-0040-000000030000"));
	indexValues.put("user_id", UUID.fromString("00000003-0000-0030-0040-000000030000"));

	UUID stop = UUID.fromString(uuidList.get(nDataItems-1));
	CDefinition cDefinition = definition.getDefinitions().get("object2");
	BaseCQLStatementIterator unBoundedIterator = (BaseCQLStatementIterator) CObjectCQLGenerator.makeCQLforList(KEYSPACE_NAME, shardIdLists, cDefinition, indexValues, CObjectOrdering.DESCENDING, null, stop, 10l, true, false, false);
	Session session = cm.getRhombusSession(definition);
	CQLExecutor cqlExecutor = new CQLExecutor(session, true, definition.getConsistencyLevel());
	CQLExecutorIterator cqlExecutorIterator = new CQLExecutorIterator(cqlExecutor, unBoundedIterator);
	cqlExecutorIterator.setPageSize(nDataItems);

	assertTrue(cqlExecutorIterator.hasNext());
	assertNotNull(cqlExecutorIterator.next());
	assertFalse(cqlExecutorIterator.hasNext());
	assertNull(cqlExecutorIterator.next());
}
 
Example 18 (Project: phoenix-tephra, File: TransactionProcessorTest.java)
@Test
public void testPreExistingData() throws Exception {
  String tableName = "TestPreExistingData";
  byte[] familyBytes = Bytes.toBytes("f");
  long ttlMillis = TimeUnit.DAYS.toMillis(14);
  HRegion region = createRegion(tableName, familyBytes, ttlMillis);
  try {
    region.initialize();

    // timestamps for pre-existing, non-transactional data
    long now = txVisibilityState.getVisibilityUpperBound() / TxConstants.MAX_TX_PER_MS;
    long older = now - ttlMillis / 2;
    long newer = now - ttlMillis / 3;
    // timestamps for transactional data
    long nowTx = txVisibilityState.getVisibilityUpperBound();
    long olderTx = nowTx - (ttlMillis / 2) * TxConstants.MAX_TX_PER_MS;
    long newerTx = nowTx - (ttlMillis / 3) * TxConstants.MAX_TX_PER_MS;

    Map<byte[], Long> ttls = Maps.newTreeMap(Bytes.BYTES_COMPARATOR);
    ttls.put(familyBytes, ttlMillis);

    List<Cell> cells = new ArrayList<>();
    cells.add(new KeyValue(Bytes.toBytes("r1"), familyBytes, Bytes.toBytes("c1"), older, Bytes.toBytes("v11")));
    cells.add(new KeyValue(Bytes.toBytes("r1"), familyBytes, Bytes.toBytes("c2"), newer, Bytes.toBytes("v12")));
    cells.add(new KeyValue(Bytes.toBytes("r2"), familyBytes, Bytes.toBytes("c1"), older, Bytes.toBytes("v21")));
    cells.add(new KeyValue(Bytes.toBytes("r2"), familyBytes, Bytes.toBytes("c2"), newer, Bytes.toBytes("v22")));
    cells.add(new KeyValue(Bytes.toBytes("r3"), familyBytes, Bytes.toBytes("c1"), olderTx, Bytes.toBytes("v31")));
    cells.add(new KeyValue(Bytes.toBytes("r3"), familyBytes, Bytes.toBytes("c2"), newerTx, Bytes.toBytes("v32")));

    // Write non-transactional and transactional data
    for (Cell c : cells) {
      region.put(new Put(c.getRow()).add(c.getFamily(), c.getQualifier(), c.getTimestamp(), c.getValue()));
    }

    Scan rawScan = new Scan();
    rawScan.setMaxVersions();

    Transaction dummyTransaction = TxUtils.createDummyTransaction(txVisibilityState);
    Scan txScan = new Scan();
    txScan.setMaxVersions();
    txScan.setTimeRange(TxUtils.getOldestVisibleTimestamp(ttls, dummyTransaction, true),
                        TxUtils.getMaxVisibleTimestamp(dummyTransaction));
    txScan.setFilter(TransactionFilters.getVisibilityFilter(dummyTransaction, ttls, false, ScanType.USER_SCAN));

    // read all back with raw scanner
    scanAndAssert(region, cells, rawScan);

    // read all back with transaction filter
    scanAndAssert(region, cells, txScan);

    // force a flush to clear the memstore
    region.flushcache();
    scanAndAssert(region, cells, txScan);

    // force a major compaction to remove any expired cells
    region.compactStores(true);
    scanAndAssert(region, cells, txScan);

    // Reduce TTL, this should make cells with timestamps older and olderTx expire
    long newTtl = ttlMillis / 2 - 1;
    region = updateTtl(region, familyBytes, newTtl);
    ttls.put(familyBytes, newTtl);
    txScan.setTimeRange(TxUtils.getOldestVisibleTimestamp(ttls, dummyTransaction, true),
                        TxUtils.getMaxVisibleTimestamp(dummyTransaction));
    txScan.setFilter(TransactionFilters.getVisibilityFilter(dummyTransaction, ttls, false, ScanType.USER_SCAN));

    // Raw scan should still give all cells
    scanAndAssert(region, cells, rawScan);
    // However, tx scan should not return expired cells
    scanAndAssert(region, select(cells, 1, 3, 5), txScan);

    region.flushcache();
    scanAndAssert(region, cells, rawScan);

    // force a major compaction to remove any expired cells
    region.compactStores(true);
    // This time raw scan too should not return expired cells, as they would be dropped during major compaction
    scanAndAssert(region, select(cells, 1, 3, 5), rawScan);

    // Reduce TTL again to 1 ms, this should expire all cells
    newTtl = 1;
    region = updateTtl(region, familyBytes, newTtl);
    ttls.put(familyBytes, newTtl);
    txScan.setTimeRange(TxUtils.getOldestVisibleTimestamp(ttls, dummyTransaction, true),
                        TxUtils.getMaxVisibleTimestamp(dummyTransaction));
    txScan.setFilter(TransactionFilters.getVisibilityFilter(dummyTransaction, ttls, false, ScanType.USER_SCAN));

    // force a major compaction to remove expired cells
    region.compactStores(true);
    // This time raw scan should not return any cells, as all cells have expired.
    scanAndAssert(region, Collections.<Cell>emptyList(), rawScan);
  } finally {
    region.close();
  }
}
 
Example 19 (Project: datawave, File: TLDFieldIndexAggregatorTest.java)
@Test
public void apply_testAggregateFilter() throws IOException {
    EventDataQueryFilter mockFilter = EasyMock.createMock(EventDataQueryFilter.class);
    
    TypeMetadata typeMetadata = new TypeMetadata();
    AttributeFactory factory = new AttributeFactory(typeMetadata);
    
    Set<String> aggregatedFields = new HashSet<>();
    aggregatedFields.add("FOO");
    
    aggregator = new TLDFieldIndexAggregator(aggregatedFields, mockFilter, -1);
    
    TreeMap<Key,Value> treeMap = Maps.newTreeMap();
    Key fi1 = getFi("123", "FIELD1", "VALUE1", "dataType1", "123.345.456", 10);
    Key fi2 = getFi("123", "FIELD1", "VALUE2", "dataType1", "123.345.456.1", 10);
    Key fi3 = getFi("123", "FIELD1", "VALUE3", "dataType1", "123.345.456.2", 10);
    Key fi4 = getFi("123", "FIELD1", "VALUE4", "dataType1", "123.345.456.3", 10);
    // FOO included in the filter
    Key fi5 = getFi("123", "FOO", "bar", "dataType1", "123.345.456.3", 10);
    // FOO2 not included in the filter
    Key fi6 = getFi("123", "FOO2", "bar", "dataType1", "123.345.456.3", 10);
    // key outside the range which should not be aggregated
    Key fi7 = getFi("123", "XENO", "zap", "dataType1", "234.345.456", 10);
    
    treeMap.put(fi1, new Value());
    treeMap.put(fi2, new Value());
    treeMap.put(fi3, new Value());
    treeMap.put(fi4, new Value());
    treeMap.put(fi5, new Value());
    treeMap.put(fi6, new Value());
    treeMap.put(fi7, new Value());
    
    EasyMock.expect(mockFilter.keep(EasyMock.isA(Key.class))).andReturn(true);
    
    EasyMock.replay(mockFilter);
    
    SortedKeyValueIterator<Key,Value> itr = new SortedMapIterator(treeMap);
    itr.seek(new Range(), null, true);
    
    Document doc = new Document();
    aggregator.apply(itr, doc, factory);
    
    EasyMock.verify(mockFilter);
    
    // list of FIELD1 values to expect
    List<String> expectedFieldValues = new ArrayList<>();
    expectedFieldValues.add("VALUE1");
    expectedFieldValues.add("VALUE2");
    expectedFieldValues.add("VALUE3");
    expectedFieldValues.add("VALUE4");
    
    assertTrue(doc.get("FIELD1").isToKeep());
    Set<Attribute> attributes = ((Set<Attribute>) doc.get("FIELD1").getData());
    assertTrue(attributes.size() == 4);
    Iterator<Attribute> attrItr = attributes.iterator();
    while (attrItr.hasNext()) {
        Attribute attr = attrItr.next();
        assertFalse(attr.isToKeep());
        assertTrue(expectedFieldValues.remove(attr.getData().toString()));
    }
    
    assertTrue(expectedFieldValues.size() == 0);
    // FOO kept
    assertTrue(doc.get("FOO").isToKeep());
    // FOO2 not kept
    assertTrue(!doc.get("FOO2").isToKeep());
    // out of document range not included
    assertTrue(doc.get("XENO") == null);
}
 
Example 20 (Project: vjtools, File: MapUtil.java)
/**
 * Constructs a correctly typed TreeMap, with the type parameters inferred from the left-hand side of the assignment.
 * 
 * @see com.google.common.collect.Maps#newTreeMap(Comparator)
 */
public static <C, K extends C, V> TreeMap<K, V> newSortedMap(@Nullable Comparator<C> comparator) {
	return Maps.newTreeMap(comparator);
}
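A hypothetical call site for the helper above (the demo class, comparator, and data are illustrative assumptions, not part of vjtools). It shows the inference the Javadoc describes: K and V come from the assignment target, while the comparator may be declared on any supertype of the key type.

import java.util.Comparator;
import java.util.TreeMap;

public class NewSortedMapDemo {
	public static void main(String[] args) {
		// C = CharSequence, K = String, V = Integer: since K extends C,
		// a Comparator<CharSequence> can order a TreeMap<String, Integer>.
		Comparator<CharSequence> byLength = Comparator.comparingInt(CharSequence::length);
		TreeMap<String, Integer> counts = MapUtil.newSortedMap(byLength);
		counts.put("fig", 1);
		counts.put("pear", 2);
		counts.put("banana", 3);
		// Keys iterate shortest-first (equal-length keys would collide).
		System.out.println(counts.keySet()); // [fig, pear, banana]
	}
}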