com.google.common.collect.Multimap#putAll() Source Code Examples

Listed below are example usages of com.google.common.collect.Multimap#putAll(), collected from open-source projects; you can also click through to GitHub to view the full source of each.
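All of the examples below exercise one of putAll's two overloads: putAll(K key, Iterable<? extends V> values), which appends several values under a single key, and putAll(Multimap), which copies every entry from another multimap. A minimal self-contained sketch (class and variable names are illustrative only):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.Arrays;

public class PutAllDemo {
    public static void main(String[] args) {
        Multimap<String, String> tags = ArrayListMultimap.create();

        // Overload 1: putAll(K, Iterable<? extends V>) stores several values under one key.
        tags.putAll("fruit", Arrays.asList("apple", "pear"));

        // Overload 2: putAll(Multimap) copies every key-value pair from another multimap.
        Multimap<String, String> more = HashMultimap.create();
        more.put("veg", "carrot");
        tags.putAll(more);

        System.out.println(tags); // e.g. {fruit=[apple, pear], veg=[carrot]} (key order not guaranteed)
    }
}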

Example 1 | Project: datawave | File: UniqueTransform.java
/**
 * Multiply set1 and set2 by combining each entry in set1 with the entries in set2 whose key sets are disjoint from it. The entries left in set1 are those
 * that could not be combined with anything in set2.
 * 
 * @param set1
 * @param set2
 * @return the multiplication
 */
private Set<Multimap<String,String>> multiply(Set<Multimap<String,String>> set1, Set<Multimap<String,String>> set2) {
    Set<Multimap<String,String>> combined = new HashSet<>();
    for (Iterator<Multimap<String,String>> it = set1.iterator(); it.hasNext();) {
        Multimap<String,String> entry = it.next();
        boolean remove = false;
        for (Multimap<String,String> other : set2) {
            // skip self-comparison; combine only when the key sets are disjoint
            if (entry != other && !intersects(entry.keySet(), other.keySet())) {
                Multimap<String,String> combinedFields = HashMultimap.create(entry);
                combinedFields.putAll(other);
                combined.add(combinedFields);
                remove = true;
            }
        }
        if (remove) {
            it.remove();
        }
    }
    return combined;
}
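The key move above is HashMultimap.create(entry), which copies the first multimap so that putAll can merge the second one in without mutating either input. A minimal sketch of that combine step, with hypothetical field names and assuming disjoint key sets:

// assumes the usual Guava imports (Multimap, HashMultimap)
Multimap<String, String> entry = HashMultimap.create();
entry.put("NAME", "alice");
Multimap<String, String> other = HashMultimap.create();
other.put("CITY", "rome");

Multimap<String, String> combinedFields = HashMultimap.create(entry); // defensive copy, leaves 'entry' untouched
combinedFields.putAll(other); // keys are disjoint, so the sizes simply add
// combinedFields now holds {NAME=[alice], CITY=[rome]}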
 
Example 2

private List<BasicGradleTaskSelector> buildRecursively(GradleProject project) {
    Multimap<String, String> aggregatedTasks = ArrayListMultimap.create();
    for (GradleProject childProject : project.getChildren()) {
        List<BasicGradleTaskSelector> childSelectors = buildRecursively(childProject);
        for (BasicGradleTaskSelector childSelector : childSelectors) {
            aggregatedTasks.putAll(childSelector.getName(), childSelector.getTaskNames());
        }
    }
    for (GradleTask task : project.getTasks()) {
        aggregatedTasks.put(task.getName(), task.getPath());
    }
    List<BasicGradleTaskSelector> selectors = Lists.newArrayList();
    for (String selectorName : aggregatedTasks.keySet()) {
        SortedSet<String> selectorTasks = Sets.newTreeSet(new TaskNameComparator());
        selectorTasks.addAll(aggregatedTasks.get(selectorName));
        selectors.add(new BasicGradleTaskSelector().
                setName(selectorName).
                setTaskNames(selectorTasks).
                setDescription(project.getParent() != null
                        ? String.format("%s:%s task selector", project.getPath(), selectorName)
                        : String.format("%s task selector", selectorName)).
                setDisplayName(String.format("%s in %s and subprojects.", selectorName, project.getName())));
    }
    return selectors;
}
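Note that aggregatedTasks is an ArrayListMultimap, so repeated task names accumulate across child projects and the TreeSet above dedups them afterwards; a set-backed multimap would instead drop duplicates at insertion time. A quick sketch of the difference:

// assumes the usual Guava imports and java.util.Arrays
Multimap<String, String> listBacked = ArrayListMultimap.create();
listBacked.putAll("build", Arrays.asList("a:build", "a:build"));
System.out.println(listBacked.get("build").size()); // 2 -- a list-backed multimap keeps duplicates

Multimap<String, String> setBacked = HashMultimap.create();
setBacked.putAll("build", Arrays.asList("a:build", "a:build"));
System.out.println(setBacked.get("build").size()); // 1 -- a set-backed multimap drops them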
 
Example 3 | Project: samza | File: OperatorSpecGraphAnalyzer.java
public Multimap<SendToTableOperatorSpec, StreamTableJoinOperatorSpec> getSendToTableOpSpecToStreamTableJoinOpSpecs() {
  Multimap<SendToTableOperatorSpec, StreamTableJoinOperatorSpec> sendToTableOpSpecToStreamTableJoinOpSpecs =
      HashMultimap.create();

  // Map every SendToTableOperatorSpec to all StreamTableJoinOperatorSpecs referencing the same table.
  for (String tableId : tableToSendToTableOpSpecs.keySet()) {
    Collection<SendToTableOperatorSpec> sendToTableOpSpecs = tableToSendToTableOpSpecs.get(tableId);
    Collection<StreamTableJoinOperatorSpec> streamTableJoinOpSpecs =
        tableToStreamTableJoinOpSpecs.get(tableId);

    for (SendToTableOperatorSpec sendToTableOpSpec : sendToTableOpSpecs) {
      sendToTableOpSpecToStreamTableJoinOpSpecs.putAll(sendToTableOpSpec, streamTableJoinOpSpecs);
    }
  }

  return Multimaps.unmodifiableMultimap(sendToTableOpSpecToStreamTableJoinOpSpecs);
}
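Two Guava behaviors are relevant here: the Multimaps.unmodifiableMultimap view returned above rejects all writes, and putAll(key, values) with an empty collection (possible when no join references a table) adds nothing and returns false. A sketch:

// assumes the usual Guava imports and java.util.Collections
Multimap<String, String> backing = HashMultimap.create();
Multimap<String, String> view = Multimaps.unmodifiableMultimap(backing);

backing.put("tableId", "joinSpec"); // writes to the backing multimap show through the view
// view.put("tableId", "x");        // any write through the view throws UnsupportedOperationException

boolean changed = backing.putAll("tableId", Collections.<String>emptyList());
System.out.println(changed);        // false -- putAll with an empty collection is a no-op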
 
Example 4

@Override
public Multimap<BulkIngestKey,Value> processBulk(KEYIN key, RawRecordContainer event, Multimap<String,NormalizedContentInterface> eventFields,
                StatusReporter reporter) {
    
    if (event.fatalError()) {
        return null;
    }
    
    this.shardId = getShardId(event);
    this.eventDataTypeName = event.getDataType().outputName();
    this.eventUid = event.getId().toString();
    
    Multimap<BulkIngestKey,Value> values = HashMultimap.create();
    // get the typical shard/index information
    values.putAll(super.processBulk(key, event, eventFields, reporter));
    
    flushTokenOffsetCache(event, values);
    
    counters.flush(reporter);
    
    return values;
}
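One caution with the pattern above: values.putAll(super.processBulk(...)) assumes the parent implementation never returns null, even though this override itself returns null on a fatal error, and putAll(Multimap) throws a NullPointerException when handed null. A generic sketch of the guard a caller would need (maybeNull is a hypothetical stand-in for a producer that may return null):

// assumes the usual Guava imports (Multimap, HashMultimap, ImmutableMultimap)
static Multimap<String, String> maybeNull(boolean fail) {
    return fail ? null : ImmutableMultimap.of("k", "v");
}

static void merge(Multimap<String, String> values, boolean fail) {
    Multimap<String, String> produced = maybeNull(fail);
    if (produced != null) {
        values.putAll(produced); // values.putAll(null) would throw NullPointerException
    }
}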
 
Example 5 | Project: datawave | File: RangeStreamTest.java
@Test
public void testOrNoFieldIndexed() throws Exception {
    String originalQuery = "(TACO == 'bag' || TACO == 'ba')";
    ASTJexlScript script = JexlASTHelper.parseJexlQuery(originalQuery);
    
    config.setBeginDate(new Date(0));
    config.setEndDate(new Date(System.currentTimeMillis()));
    
    Multimap<String,Type<?>> dataTypes = HashMultimap.create();
    dataTypes.putAll("FOO", Sets.newHashSet(new LcNoDiacriticsType()));
    dataTypes.putAll("NUM", Sets.newHashSet(new NumberType()));
    
    config.setQueryFieldsDatatypes(dataTypes);
    config.setIndexedFields(dataTypes);
    
    MockMetadataHelper helper = new MockMetadataHelper();
    helper.setIndexedFields(dataTypes.keySet());
    
    assertFalse(new RangeStream(config, new ScannerFactory(config.getConnector()), helper).streamPlans(script).iterator().hasNext());
}
 
Example 6 | Project: datawave | File: ExpandCompositeTerms.java
/**
 * Returns a multimap containing only the composites that can be created from the given required and other fields
 *
 * @param requiredFields
 *            A collection of fields, of which at least one must be present in each returned composite field mapping
 * @param otherFields
 *            A collection of other fields at our disposal for creating composites
 * @return A multimap from each composite field that can be created with the given fields to its component fields
 */
private Multimap<String,String> getFilteredCompositeToFieldMap(Collection<String> requiredFields, Collection<String> otherFields) {
    List<String> allFields = new ArrayList<>();
    allFields.addAll(requiredFields);
    allFields.addAll(otherFields);
    
    // determine which composites can be made
    Multimap<String,String> compositeToFieldMap = LinkedHashMultimap.create();
    for (String compositeField : config.getCompositeToFieldMap().keySet()) {
        Collection<String> componentFields = new ArrayList<>(config.getCompositeToFieldMap().get(compositeField));
        
        // determine whether one of our required fields is present
        boolean requiredFieldPresent = componentFields.stream().anyMatch(requiredFields::contains);
        
        // if a required field is present, and we have all of the
        // fields needed to make the composite, add it to our list
        if (requiredFieldPresent && allFields.containsAll(componentFields))
            compositeToFieldMap.putAll(compositeField, componentFields);
    }
    return compositeToFieldMap;
}
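LinkedHashMultimap both rejects duplicate key-value pairs and preserves insertion order, so the composite map built above iterates deterministically. A sketch with illustrative field names:

// assumes the usual Guava imports and java.util.Arrays
Multimap<String, String> compositeToFieldMap = LinkedHashMultimap.create();
compositeToFieldMap.putAll("GEO_FIELD", Arrays.asList("LAT", "LON"));
compositeToFieldMap.putAll("GEO_FIELD", Arrays.asList("LAT", "LON")); // duplicate pairs are dropped

System.out.println(compositeToFieldMap.keySet());        // [GEO_FIELD] -- keys iterate in insertion order
System.out.println(compositeToFieldMap.get("GEO_FIELD")); // [LAT, LON]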
 
Example 7 | Project: incubator-pinot | File: DefaultDataProvider.java
/**
 * Fetch all anomalies matching the requested anomaly slices (anomalies whose windows overlap each slice's window)
 */
@Override
public Multimap<AnomalySlice, MergedAnomalyResultDTO> fetchAnomalies(Collection<AnomalySlice> slices) {
  Multimap<AnomalySlice, MergedAnomalyResultDTO> output = ArrayListMultimap.create();
  try {
    for (AnomalySlice slice : slices) {
      Collection<MergedAnomalyResultDTO> cacheResult = anomaliesCache.fetchSlice(slice);

      // make a copy of the result so that cache won't be contaminated by client code
      List<MergedAnomalyResultDTO> clonedAnomalies = new ArrayList<>();
      for (MergedAnomalyResultDTO anomaly : cacheResult) {
        clonedAnomalies.add((MergedAnomalyResultDTO) SerializationUtils.clone(anomaly));
      }

      LOG.info("Fetched {} anomalies for slice {}", clonedAnomalies.size(), slice);
      output.putAll(slice, clonedAnomalies);
    }

    return output;
  } catch (Exception e) {
    throw new RuntimeException("Failed to fetch anomalies from database.", e);
  }
}
 
Example 8 | Project: datawave | File: ExpandMultiNormalizedTermsTest.java
@Test
public void testNormalizedBoundsCase() throws ParseException {
    Multimap<String,Type<?>> dataTypes = HashMultimap.create();
    dataTypes.putAll("FOO", Sets.newHashSet(new NumberType()));
    
    helper.setIndexedFields(dataTypes.keySet());
    helper.setIndexOnlyFields(dataTypes.keySet());
    helper.addTermFrequencyFields(dataTypes.keySet());
    
    config.setQueryFieldsDatatypes(dataTypes);
    
    String original = "FOO > '1' && FOO < '10'";
    String expected = "(FOO > '+aE1' && FOO < '+bE1')";
    expandTerms(original, expected);
}
 
Example 9 | Project: incubator-pinot | File: MetricEntity.java
public static MetricEntity fromMetric(Map<String, Collection<String>> filterMaps, long id) {
  Multimap<String, String> filters = ArrayListMultimap.create();
  if (filterMaps != null) {
    for (Map.Entry<String, Collection<String>> entry : filterMaps.entrySet()) {
      filters.putAll(entry.getKey(), entry.getValue());
    }
  }

  return fromMetric(1.0, id, filters);
}
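Because putAll(K, Iterable<? extends V>) fits the shape of a BiConsumer, the null-guarded copy loop above can also be written as a single forEach with a method reference. A sketch (toMultimap is a hypothetical helper name):

// assumes the usual Guava imports and java.util.*
static Multimap<String, String> toMultimap(Map<String, Collection<String>> filterMaps) {
    Multimap<String, String> filters = ArrayListMultimap.create();
    if (filterMaps != null) {
        filterMaps.forEach(filters::putAll); // putAll(K, Iterable) matches BiConsumer<String, Collection<String>>
    }
    return filters;
}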
 
Example 10 | Project: incubator-gobblin | File: PublisherUtils.java
/**
 * Given a {@link Multimap} of {@link Extract}s to {@link WorkUnitState}s, filter out any {@link Extract}s whose
 * corresponding {@link WorkUnitState}s do not all meet the given {@link Predicate}.
 */
public static Multimap<Extract, WorkUnitState> getExtractsForPredicate(
    Multimap<Extract, WorkUnitState> extractToWorkUnitStateMap, Predicate<WorkUnitState> predicate) {
  Multimap<Extract, WorkUnitState> successfulExtracts = ArrayListMultimap.create();
  for (Map.Entry<Extract, Collection<WorkUnitState>> entry : extractToWorkUnitStateMap.asMap().entrySet()) {
    if (Iterables.all(entry.getValue(), predicate)) {
      successfulExtracts.putAll(entry.getKey(), entry.getValue());
    }
  }
  return successfulExtracts;
}
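The asMap() view used above is the idiomatic way to test a key's whole value collection at once. The same keep-a-key-only-if-every-value-passes filter can be sketched without Guava's Iterables helper (the even-number predicate is illustrative):

// assumes the usual Guava imports and java.util.Arrays
Multimap<String, Integer> source = ArrayListMultimap.create();
source.putAll("evens", Arrays.asList(2, 4));
source.putAll("mixed", Arrays.asList(2, 3));

Multimap<String, Integer> kept = ArrayListMultimap.create();
source.asMap().forEach((key, values) -> {
    if (values.stream().allMatch(v -> v % 2 == 0)) {
        kept.putAll(key, values); // copy the whole group only when every value passes
    }
});
// kept now contains only {evens=[2, 4]}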
 
Example 11 | Project: helios | File: HeliosClient.java
/**
 * Returns a list of all hosts registered in the Helios cluster that match both the given hostname
 * pattern and set of host selectors.
 *
 * @see #listHosts(Set)
 */
public ListenableFuture<List<String>> listHosts(final String namePattern,
                                                final Set<String> unparsedHostSelectors) {

  final Multimap<String, String> query = HashMultimap.create();
  query.put("namePattern", namePattern);
  query.putAll("selector", unparsedHostSelectors);

  return listHosts(query);
}
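The query multimap above mixes a single put for the name pattern with a bulk putAll for the selector set; since it is a HashMultimap, duplicate selector strings are silently collapsed, which suits query parameters. A sketch of the resulting shape with illustrative values:

// assumes the usual Guava imports (Multimap, HashMultimap, ImmutableSet)
Multimap<String, String> query = HashMultimap.create();
query.put("namePattern", "web-.*");
query.putAll("selector", ImmutableSet.of("env=prod", "zone=us-east"));

System.out.println(query.get("selector").size()); // 2 -- one entry per distinct selector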
 
Example 12 | Project: javaide | File: DependencyManager.java
private static LibraryDependencyImpl convertLibInfo(
        @NonNull LibInfo libInfo,
        @NonNull Multimap<LibraryDependency, VariantDependencies> reverseMap,
        @NonNull Map<LibInfo, LibraryDependencyImpl> convertedMap) {
    LibraryDependencyImpl convertedLib = convertedMap.get(libInfo);
    if (convertedLib == null) {
        // first, convert the children.
        @SuppressWarnings("unchecked")
        List<LibInfo> children = (List<LibInfo>) (List<?>) libInfo.getDependencies();
        List<LibraryDependency> convertedChildren = Lists.newArrayListWithCapacity(children.size());

        for (LibInfo child : children) {
            convertedChildren.add(convertLibInfo(child, reverseMap, convertedMap));
        }

        // now convert the libInfo
        convertedLib = new LibraryDependencyImpl(
                libInfo.getBundle(),
                libInfo.getFolder(),
                convertedChildren,
                libInfo.getName(),
                libInfo.getProjectVariant(),
                libInfo.getProject(),
                libInfo.getRequestedCoordinates(),
                libInfo.getResolvedCoordinates(),
                libInfo.isOptional());

        // add it to the map
        convertedMap.put(libInfo, convertedLib);

        // and update the reversemap
        // get the items associated with the libInfo. Put in a fresh list as the returned
        // collection is backed by the content of the map.
        Collection<VariantDependencies> values = Lists.newArrayList(reverseMap.get(libInfo));
        reverseMap.removeAll(libInfo);
        reverseMap.putAll(convertedLib, values);
    }

    return convertedLib;
}
 
Example 13 | Project: xtext-core | File: PathTraverser.java
public Multimap<String, URI> resolvePathes(List<String> pathes, Predicate<URI> isValidPredicate) {
	Multimap<String, URI> uris = HashMultimap.create();
	for (String path : pathes) {
		Set<URI> resourceUris = findAllResourceUris(path, isValidPredicate);
		uris.putAll(path, resourceUris);
	}
	return uris;
}
 
Example 14

String changeTrackingStatementQuery(TableMetadataProvider.TableMetadata tableMetadata) {
  Preconditions.checkState(
      tableMetadata.keyColumns().size() > 0,
      "Table([%s].[%s]) must have at least one primary key column.",
      tableMetadata.schemaName(),
      tableMetadata.tableName()
  );
  Collection<String> valueColumns = Collections2.filter(tableMetadata.columnSchemas().keySet(), Predicates.not(Predicates.in(tableMetadata.keyColumns())));

  String joinCriteria = joinCriteria(tableMetadata.keyColumns());
  Multimap<String, String> adfs = LinkedListMultimap.create();
  adfs.putAll("ct", tableMetadata.keyColumns());
  adfs.putAll("u", valueColumns);


  final String sql = String.format("SELECT " +
          "[ct].[sys_change_version] AS [__metadata_sys_change_version], " +
          "[ct].[sys_change_creation_version] AS [__metadata_sys_change_creation_version], " +
          "[ct].[sys_change_operation] AS [__metadata_sys_change_operation], " +
          joinSelect(adfs) + " " +
          "FROM [%s].[%s] AS [u] " +
          "RIGHT OUTER JOIN " +
          "CHANGETABLE(CHANGES [%s].[%s], ?) AS [ct] " +
          "ON %s",
      tableMetadata.schemaName(),
      tableMetadata.tableName(),
      tableMetadata.schemaName(),
      tableMetadata.tableName(),
      joinCriteria
  );
  log.trace("changeTrackingStatementQuery() - sql:\n{}", sql);
  return sql;
}
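LinkedListMultimap is a sensible choice above because it preserves the insertion order of entries, which keeps the generated column list stable. joinSelect is this project's own helper, so the sketch below simply iterates entries() the way such a formatter plausibly would:

// assumes the usual Guava imports and java.util.*; the output format is an assumption
Multimap<String, String> adfs = LinkedListMultimap.create();
adfs.putAll("ct", Arrays.asList("id"));
adfs.putAll("u", Arrays.asList("name", "email"));

for (Map.Entry<String, String> e : adfs.entries()) {
    System.out.println("[" + e.getKey() + "].[" + e.getValue() + "]");
}
// [ct].[id]
// [u].[name]
// [u].[email]   -- entries() iterates in insertion order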
 
Example 15 | Project: datawave | File: MixedGeoAndGeoWaveTest.java
public static int ingestData(Configuration conf, String fieldName, String[] data, int startRecNum, String ingestDate) throws Exception {
    AbstractColumnBasedHandler<Text> dataTypeHandler = new AbstractColumnBasedHandler<>();
    dataTypeHandler.setup(new TaskAttemptContextImpl(conf, new TaskAttemptID()));
    
    TestIngestHelper ingestHelper = new TestIngestHelper();
    ingestHelper.setup(conf);
    
    // create and process events with WKT data
    RawRecordContainer record = new RawRecordContainerImpl();
    Multimap<BulkIngestKey,Value> keyValues = HashMultimap.create();
    int recNum = startRecNum;
    
    for (int i = 0; i < data.length; i++) {
        record.clear();
        record.setDataType(new Type(DATA_TYPE_NAME, TestIngestHelper.class, (Class) null, (String[]) null, 1, (String[]) null));
        record.setRawFileName("geodata_" + recNum + ".dat");
        record.setRawRecordNumber(recNum++);
        record.setDate(formatter.parse(ingestDate).getTime());
        record.setRawData((fieldName + data[i]).getBytes("UTF8"));
        record.generateId(null);
        record.setVisibility(new ColumnVisibility(AUTHS));
        
        final Multimap<String,NormalizedContentInterface> fields = LinkedListMultimap.create();
        for (Map.Entry<String,NormalizedContentInterface> entry : ingestHelper.getEventFields(record).entries())
            if (entry.getValue().getError() == null)
                fields.put(entry.getKey(), entry.getValue());
        
        Multimap<BulkIngestKey,Value> kvPairs = dataTypeHandler.processBulk(new Text(), record, fields, new MockStatusReporter());
        
        keyValues.putAll(kvPairs);
        
        dataTypeHandler.getMetadata().addEvent(ingestHelper, record, fields);
    }
    keyValues.putAll(dataTypeHandler.getMetadata().getBulkMetadata());
    
    // write these values to their respective tables
    Connector connector = instance.getConnector("root", PASSWORD);
    connector.securityOperations().changeUserAuthorizations("root", new Authorizations(AUTHS));
    
    writeKeyValues(connector, keyValues);
    
    return recNum;
}
 
Example 16 | Project: datawave | File: MultiValueCompositeIndexTest.java
@BeforeClass
public static void setupClass() throws Exception {
    System.setProperty("subject.dn.pattern", "(?:^|,)\\s*OU\\s*=\\s*My Department\\s*(?:,|$)");
    
    createTestData();
    
    setupConfiguration(conf);
    
    AbstractColumnBasedHandler<Text> dataTypeHandler = new AbstractColumnBasedHandler<>();
    dataTypeHandler.setup(new TaskAttemptContextImpl(conf, new TaskAttemptID()));
    
    TestIngestHelper ingestHelper = new TestIngestHelper();
    ingestHelper.setup(conf);
    
    // create and process events with WKT data
    RawRecordContainer record = new RawRecordContainerImpl();
    Multimap<BulkIngestKey,Value> keyValues = HashMultimap.create();
    int recNum = 1;
    
    for (int i = 0; i < testData.size(); i++) {
        TestData entry = testData.get(i);
        
        record.clear();
        record.setDataType(new Type(DATA_TYPE_NAME, TestIngestHelper.class, (Class) null, (String[]) null, 1, (String[]) null));
        record.setRawFileName("geodata_" + recNum + ".dat");
        record.setRawRecordNumber(recNum++);
        record.setDate(formatter.parse(COMPOSITE_BEGIN_DATE).getTime());
        record.setRawData(entry.toString().getBytes("UTF8"));
        record.generateId(null);
        record.setVisibility(new ColumnVisibility(AUTHS));
        
        final Multimap<String,NormalizedContentInterface> fields = ingestHelper.getEventFields(record);
        
        Multimap<String,NormalizedContentInterface> compositeFields = ingestHelper.getCompositeFields(fields);
        for (String fieldName : compositeFields.keySet()) {
            // if this is an overloaded event field, we are replacing the existing data
            if (ingestHelper.isOverloadedCompositeField(fieldName))
                fields.removeAll(fieldName);
            fields.putAll(fieldName, compositeFields.get(fieldName));
        }
        
        Multimap<BulkIngestKey,Value> kvPairs = dataTypeHandler.processBulk(new Text(), record, fields, new MockStatusReporter());
        
        keyValues.putAll(kvPairs);
        
        dataTypeHandler.getMetadata().addEvent(ingestHelper, record, fields);
    }
    
    keyValues.putAll(dataTypeHandler.getMetadata().getBulkMetadata());
    
    // write these values to their respective tables
    instance = new InMemoryInstance();
    Connector connector = instance.getConnector("root", PASSWORD);
    connector.securityOperations().changeUserAuthorizations("root", new Authorizations(AUTHS));
    
    writeKeyValues(connector, keyValues);
}
 
Example 17 | Project: Clusion | File: Partition.java
public static Multimap<Integer, String> partitioning(Multimap<String, String> lookup) {

		// Partitions Creation
		Set<String> keys = lookup.keySet();

		int partitionId = 0;
		Multimap<Integer, String> partitions = ArrayListMultimap.create();
		int counter2 = 0;

		for (String key : keys) {
			Set<Integer> keys2 = partitions.keySet();
			List<String> inter = (List<String>) lookup.get(key);
			List<String> interTMP = new ArrayList<String>(inter);

			Printer.debugln("Step number: " + counter2++ + "Number of keywords " + keys.size());

			Set<String> set = new HashSet<String>(interTMP);
			Multimap<Integer, String> partitionsTMP = ArrayListMultimap.create();

			for (Integer key2 : keys2) {

				if (!set.isEmpty()) {
					Set<String> tmp = new HashSet<String>(partitions.get(key2));

					Set<String> intersection = Sets.intersection(tmp, set);

					Set<String> difference;

					if (intersection.isEmpty()) {
						difference = tmp;
					} else {
						difference = Sets.difference(tmp, intersection);
						set = Sets.difference(set, intersection);

					}

					if (!difference.isEmpty()) {
						partitionId = partitionId + 1;
						partitionsTMP.putAll(partitionId, difference);
					}

					if (!intersection.isEmpty()) {
						partitionId = partitionId + 1;
						partitionsTMP.putAll(partitionId, intersection);
					}

				} else {
					partitionId = partitionId + 1;
					partitionsTMP.putAll(partitionId, new HashSet<String>(partitions.get(key2)));
				}

			}

			interTMP = new ArrayList<String>(set);

			if (!interTMP.isEmpty()) {

				partitionId = partitionId + 1;
				partitionsTMP.putAll(partitionId, interTMP);

			}

			partitions = ArrayListMultimap.create(partitionsTMP);
			partitionsTMP.clear();
			interTMP.clear();

		}

		Printer.debugln("Partitions size " + partitions.keySet().size());
		Printer.debugln("\n");

		return partitions;
	}
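The loop refines the existing partitions against each keyword's document list, splitting every partition into its intersection and its difference with that list. A hedged trace, assuming lookup = {w1: [d1, d2], w2: [d2, d3]} and that w1 happens to be visited first (HashMultimap key order is not guaranteed):

// after w1: partitions = {1: [d1, d2]}
// after w2: partition 1 splits into difference [d1] and intersection [d2],
//           and w2's leftover [d3] becomes its own partition:
//           partitions = {2: [d1], 3: [d2], 4: [d3]}
// i.e., documents end up grouped by the exact set of keywords that reference them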
 
Example 18 | Project: datawave | File: ShardedDataTypeHandler.java
public void createShardFieldIndexColumn(RawRecordContainer event, Multimap<BulkIngestKey,Value> values, String fieldName, String fieldValue,
                byte[] visibility, byte[] shardId, String uid, long eventTimestamp, Value value) {
    values.putAll(createShardFieldIndexColumn(event, fieldName, fieldValue, visibility, visibility, null, shardId, value));
    
}
 
Example 19 | Project: rya | File: PcjTablesIT.java
/**
 * Ensure when results are already stored in Rya, that we are able to populate
 * the PCJ table for a new SPARQL query using those results.
 * <p>
 * The method being tested is: {@link PcjTables#populatePcj(Connector, String, RepositoryConnection)}
 */
@Test
public void populatePcj() throws RepositoryException, PcjException, TableNotFoundException, BindingSetConversionException, AccumuloException, AccumuloSecurityException {
    // Load some Triples into Rya.
    final Set<Statement> triples = new HashSet<>();
    triples.add( VF.createStatement(VF.createIRI("http://Alice"), VF.createIRI("http://hasAge"), VF.createLiteral(BigInteger.valueOf(14))) );
    triples.add( VF.createStatement(VF.createIRI("http://Alice"), VF.createIRI("http://playsSport"), VF.createLiteral("Soccer")) );
    triples.add( VF.createStatement(VF.createIRI("http://Bob"), VF.createIRI("http://hasAge"), VF.createLiteral(BigInteger.valueOf(16))) );
    triples.add( VF.createStatement(VF.createIRI("http://Bob"), VF.createIRI("http://playsSport"), VF.createLiteral("Soccer")) );
    triples.add( VF.createStatement(VF.createIRI("http://Charlie"), VF.createIRI("http://hasAge"), VF.createLiteral(BigInteger.valueOf(12))) );
    triples.add( VF.createStatement(VF.createIRI("http://Charlie"), VF.createIRI("http://playsSport"), VF.createLiteral("Soccer")) );
    triples.add( VF.createStatement(VF.createIRI("http://Eve"), VF.createIRI("http://hasAge"), VF.createLiteral(BigInteger.valueOf(43))) );
    triples.add( VF.createStatement(VF.createIRI("http://Eve"), VF.createIRI("http://playsSport"), VF.createLiteral("Soccer")) );

    for(final Statement triple : triples) {
        ryaConn.add(triple);
    }

    // Create a PCJ table that will include those triples in its results.
    final String sparql =
            "SELECT ?name ?age " +
            "{" +
              "FILTER(?age < 30) ." +
              "?name <http://hasAge> ?age." +
              "?name <http://playsSport> \"Soccer\" " +
            "}";

    final Connector accumuloConn = cluster.getConnector();

    final String pcjTableName = new PcjTableNameFactory().makeTableName(getRyaInstanceName(), "testPcj");
    final Set<VariableOrder> varOrders = new ShiftVarOrderFactory().makeVarOrders(new VariableOrder("name;age"));
    final PcjTables pcjs = new PcjTables();
    pcjs.createPcjTable(accumuloConn, pcjTableName, varOrders, sparql);

    // Populate the PCJ table using a Rya connection.
    pcjs.populatePcj(accumuloConn, pcjTableName, ryaConn);

    // Scan Accumulo for the stored results.
    final Multimap<String, BindingSet> fetchedResults = loadPcjResults(accumuloConn, pcjTableName);

    // Make sure the cardinality was updated.
    final PcjMetadata metadata = pcjs.getPcjMetadata(accumuloConn, pcjTableName);
    assertEquals(3, metadata.getCardinality());

    // Ensure the expected results match those that were stored.
    final MapBindingSet alice = new MapBindingSet();
    alice.addBinding("name", VF.createIRI("http://Alice"));
    alice.addBinding("age", VF.createLiteral(BigInteger.valueOf(14)));

    final MapBindingSet bob = new MapBindingSet();
    bob.addBinding("name", VF.createIRI("http://Bob"));
    bob.addBinding("age", VF.createLiteral(BigInteger.valueOf(16)));

    final MapBindingSet charlie = new MapBindingSet();
    charlie.addBinding("name", VF.createIRI("http://Charlie"));
    charlie.addBinding("age", VF.createLiteral(BigInteger.valueOf(12)));

    final Set<BindingSet> results = Sets.newHashSet(alice, bob, charlie);

    final Multimap<String, BindingSet> expectedResults = HashMultimap.create();
    expectedResults.putAll("name;age", results);
    expectedResults.putAll("age;name", results);
    assertEquals(expectedResults, fetchedResults);
}
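Note that putAll copies the values out of the supplied collection, so reusing the same results set for both variable orders, as the test above does, is safe; mutating the source afterwards does not leak into the multimap. A sketch:

// assumes the usual Guava imports (Multimap, HashMultimap, Sets)
Set<String> vals = Sets.newHashSet("a");
Multimap<String, String> m = HashMultimap.create();
m.putAll("k1", vals);
m.putAll("k2", vals);

vals.add("b");
System.out.println(m.get("k1")); // [a] -- the multimap holds its own copy of the values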
 
Example 20 | Project: datawave | File: GeoSortedQueryDataTest.java
@BeforeClass
public static void setupClass() throws Exception {
    setupEnvVariables();
    conf.addResource(ClassLoader.getSystemResource("datawave/query/tables/geo-test-config.xml"));
    resolveEnvVariables(conf);
    
    TypeRegistry.reset();
    TypeRegistry registry = TypeRegistry.getInstance(conf);
    
    TaskAttemptContext ctx = new TaskAttemptContextImpl(conf, new TaskAttemptID());
    
    AbstractColumnBasedHandler<Text> dataTypeHandler = new AbstractColumnBasedHandler<>();
    dataTypeHandler.setup(ctx);
    
    TestIngestHelper ingestHelper = (TestIngestHelper) dataTypeHandler.getHelper(registry.get(DATA_TYPE_NAME));
    
    // create and process events with WKT data
    RawRecordContainer record = new RawRecordContainerImpl();
    Multimap<BulkIngestKey,Value> keyValues = HashMultimap.create();
    int recNum = 1;
    for (int i = 0; i < wktData.length; i++) {
        record.clear();
        record.setDataType(new Type(DATA_TYPE_NAME, TestIngestHelper.class, (Class) null, (String[]) null, 1, (String[]) null));
        record.setRawFileName("geodata_" + recNum + ".dat");
        record.setRawRecordNumber(recNum++);
        record.setDate(formatter.parse(BEGIN_DATE).getTime() + dates[i]);
        record.setRawData(wktData[i].getBytes("UTF8"));
        record.generateId(null);
        record.setVisibility(new ColumnVisibility(AUTHS));
        
        final Multimap<String,NormalizedContentInterface> fields = ingestHelper.getEventFields(record);
        
        Multimap<BulkIngestKey,Value> kvPairs = dataTypeHandler.processBulk(new Text(), record, fields, new MockStatusReporter());
        
        keyValues.putAll(kvPairs);
        
        dataTypeHandler.getMetadata().addEvent(ingestHelper, record, fields);
    }
    
    keyValues.putAll(dataTypeHandler.getMetadata().getBulkMetadata());
    
    // write these values to their respective tables
    instance = new InMemoryInstance();
    Connector connector = instance.getConnector("root", PASSWORD);
    connector.securityOperations().changeUserAuthorizations("root", new Authorizations(AUTHS));
    
    writeKeyValues(connector, keyValues);
}