com.google.common.collect.Multimap#get() Source Code Examples

The following are code examples of com.google.common.collect.Multimap#get() collected from open-source projects; follow each project link to view the full source on GitHub.
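Before the project samples, here is a minimal self-contained sketch of the contract every example below relies on: Multimap#get(key) returns a live view collection that is empty, never null, for absent keys. The class name and data are made up for illustration.

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;

import java.util.Collection;

public class MultimapGetDemo {
    public static void main(String[] args) {
        Multimap<String, String> phonesByPerson = ArrayListMultimap.create();
        phonesByPerson.put("alice", "555-0100");
        phonesByPerson.put("alice", "555-0101");

        // get() returns every value mapped to the key.
        Collection<String> alicePhones = phonesByPerson.get("alice");
        System.out.println(alicePhones); // [555-0100, 555-0101]

        // For an absent key, get() returns an empty collection, never null.
        Collection<String> bobPhones = phonesByPerson.get("bob");
        System.out.println(bobPhones.isEmpty()); // true

        // The returned collection is a live view: writes through it update the multimap.
        bobPhones.add("555-0199");
        System.out.println(phonesByPerson.containsKey("bob")); // true
    }
}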

Example 1  Project: disconf   File: ScanPrinterUtils.java
/**
 * Prints the contents of the Reflections store map.
 */
public static void printStoreMap(Reflections reflections) {

    LOGGER.info("Now we will print store map......");

    Store store = reflections.getStore();
    Map<String/* indexName */, Multimap<String, String>> storeMap = store.getStoreMap();
    for (String indexName : storeMap.keySet()) {

        LOGGER.info("====================================");
        LOGGER.info("indexName:" + indexName);

        Multimap<String, String> multimap = storeMap.get(indexName);

        for (String firstName : multimap.keySet()) {
            Collection<String> lastNames = multimap.get(firstName);
            LOGGER.info("\t\t" + firstName + ": " + lastNames);
        }

    }
}
 
Example 2  Project: endpoints-java   File: JsonConfigWriter.java
@Override
public Map<ApiKey, String> writeConfig(Iterable<? extends ApiConfig> configs)
    throws ApiConfigException {
  Multimap<ApiKey, ? extends ApiConfig> apisByKey = Multimaps.index(configs,
      new Function<ApiConfig, ApiKey>() {
        @Override public ApiKey apply(ApiConfig config) {
          return config.getApiKey();
        }
      });

  // This *must* retain the order of apisByKey so the lily_java_api BUILD rule has predictable
  // output order.
  Map<ApiKey, String> results = Maps.newLinkedHashMap();
  for (ApiKey apiKey : apisByKey.keySet()) {
    Collection<? extends ApiConfig> apiConfigs = apisByKey.get(apiKey);
    validator.validate(apiConfigs);
    results.put(apiKey, generateForApi(apiConfigs));
  }
  return results;
}
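Example 2 groups its configs with Multimaps.index and then reads each group back with get(). Below is a standalone sketch of that grouping idiom, assuming nothing beyond Guava itself (the data and class name are invented):

import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.Multimaps;

import java.util.List;

public class IndexDemo {
    public static void main(String[] args) {
        // Group strings by length; keys keep first-seen order, values keep input order.
        ImmutableListMultimap<Integer, String> byLength =
                Multimaps.index(List.of("a", "bb", "cc"), String::length);

        System.out.println(byLength.get(2)); // [bb, cc]
        System.out.println(byLength.get(9)); // [] -- absent keys yield an empty list, not null
    }
}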
 
Example 3  Project: datawave   File: DiscoveryLogic.java
/**
 * Attempts to normalize each {@code <value, field>} tuple with the corresponding {@code <field, normalizer>} tuple. The Normalization object
 * determines whether a regex or a literal is being normalized.
 *
 * See the {@link PatternNormalization} and {@link LiteralNormalization} implementations.
 *
 * @param normalization
 *            the normalization strategy to apply (pattern or literal)
 * @param valuesToFields
 *            a multimap of values to the fields they occur in
 * @param dataTypeMap
 *            a multimap of field names to the data types used to normalize them
 * @return a multimap of normalized values to their fields
 */
public static Multimap<String,String> normalize(Normalization normalization, Multimap<String,String> valuesToFields, Multimap<String,Type<?>> dataTypeMap) {
    Multimap<String,String> normalizedValuesToFields = HashMultimap.create();
    for (Entry<String,String> valueAndField : valuesToFields.entries()) {
        String value = valueAndField.getKey(), field = valueAndField.getValue();
        for (Type<?> dataType : dataTypeMap.get(field)) {
            try {
                log.debug("Attempting to normalize [" + value + "] with [" + dataType.getClass() + "]");
                String normalized = normalization.normalize(dataType, field, value);
                normalizedValuesToFields.put(normalized, field);
                log.debug("Normalization succeeded!");
            } catch (Exception exception) {
                log.debug("Normalization failed.");
            }
        }
    }
    return normalizedValuesToFields;
}
 
Example 4  Project: yql-plus   File: TraceFormatter.java
private static void dumpTree(CodeOutput out, Multimap<Integer, TraceEntry> childmap, Multimap<Integer, TraceLogEntry> logmap, Collection<TraceEntry> traceEntries) {
    for (TraceEntry entry : traceEntries) {
        out.println(String.format("%s : %s (%.2f - %.2f) (%.2f)", entry.getGroup(), entry.getName(), entry.getStartMilliseconds(), entry.getEndMilliseconds(), entry.getDurationMilliseconds()));
        out.indent();
        for (TraceLogEntry log : logmap.get(entry.getId())) {
            out.println("%6d [%s]: %s", TimeUnit.NANOSECONDS.toMicros(log.getTicks()), log.getLevel(), log);
            if (log.get() instanceof ThrowableEvent) {
                ThrowableEvent event = (ThrowableEvent) log.get();
                out.indent();
                out.println("Exception: %s", event.message);
                out.indent();
                CodeFormatter.writeException(event.throwable, out);
                out.dedent();
                out.dedent();
            }
        }
        dumpTree(out, childmap, logmap, childmap.get(entry.getId()));
        out.dedent();
    }
}
 
Example 5  Project: hmftools   File: GCRatioSupplier.java
GCRatioSupplier(@NotNull final Multimap<Chromosome, GCProfile> gcProfiles, @NotNull final Multimap<Chromosome, CobaltCount> counts) {
    final GenomeRegionSelector<GCProfile> gcProfileSelector = GenomeRegionSelectorFactory.createImproved(gcProfiles);

    final GCRatioNormalization tumorRatiosBuilder = new GCRatioNormalization();
    final GCRatioNormalization referenceRatiosBuilder = new GCRatioNormalization();

    for (Chromosome chromosome : counts.keySet()) {
        for (CobaltCount cobaltPosition : counts.get(chromosome)) {
            final Optional<GCProfile> optionalGCProfile = gcProfileSelector.select(cobaltPosition);
            if (optionalGCProfile.isPresent()) {
                final GCProfile gcProfile = optionalGCProfile.get();
                referenceRatiosBuilder.addPosition(chromosome, gcProfile, cobaltPosition.referenceReadCount());
                tumorRatiosBuilder.addPosition(chromosome, gcProfile, cobaltPosition.tumorReadCount());
            }
        }
    }

    referenceGCMedianReadCount = referenceRatiosBuilder.gcMedianReadCount();
    referenceRatios = referenceRatiosBuilder.build(referenceGCMedianReadCount);

    tumorGCMedianReadCount = tumorRatiosBuilder.gcMedianReadCount();
    tumorRatios = tumorRatiosBuilder.build(tumorGCMedianReadCount);
}
 
Example 6  Project: cloudbreak   File: SaltStatesTest.java
@Test
public void jidInfoHighTest() throws Exception {
    String jobId = "2";

    InputStream responseStream = SaltStatesTest.class.getResourceAsStream("/jid_response.json");
    String response = IOUtils.toString(responseStream);
    responseStream.close();
    Map<?, ?> responseMap = new ObjectMapper().readValue(response, Map.class);
    when(saltConnector.run(eq("jobs.lookup_jid"), any(), any(), eq("jid"), any())).thenReturn(responseMap);

    Multimap<String, String> jidInfo = SaltStates.jidInfo(saltConnector, jobId, target, StateType.HIGH);
    verify(saltConnector, times(1)).run("jobs.lookup_jid", RUNNER, Map.class, "jid", jobId);

    assertThat(jidInfo.keySet(), hasSize(1));
    assertThat(jidInfo.entries(), hasSize(4));
    String hostName = jidInfo.keySet().iterator().next();
    Collection<String> hostErrors = jidInfo.get(hostName);

    assertThat(hostErrors, containsInAnyOrder(
            "\nName: /opt/ambari-server/ambari-server-init.sh\nComment: Source file salt://ambari/scripts/ambari-server-initttt.sh not found",
            "\nName: ambari-server\nComment: Service ambari-server is already enabled, and is dead",
            "\nComment: Command \"/opt/ambari-server/install-mpack-1.sh\" run\nStderr: + ARGS= + echo yes + ambari-server install-mpack --",
            "\nName: haveged\nComment: Package haveged is already installed."));
}
 
Example 7

private void transformDeptTree(List<DepTreeRes> deptLevelList, String level, Multimap<String, DepTreeRes> levelDeptMap) {
    for (DepTreeRes deptLevelDto : deptLevelList) {
        // Iterate over each element on this level and process the current level's data
        String nextLevel = LevelUtil.calculateLevel(level, deptLevelDto.getId());
        // Fetch the next level; the cast assumes a ListMultimap (e.g. ArrayListMultimap), whose get() view is a List
        List<DepTreeRes> tempDeptList = (List<DepTreeRes>) levelDeptMap.get(nextLevel);
        if (!CollectionUtils.isEmpty(tempDeptList)) {
            // Sort the departments
            tempDeptList.sort(deptSeqComparator);
            // Attach the next level's departments
            deptLevelDto.setDepTreeResList(tempDeptList);
            // Recurse into the next level
            transformDeptTree(tempDeptList, nextLevel, levelDeptMap);
        }
    }
}
 
Example 8  Project: dsl-devkit   File: CheckCfgJavaValidator.java
/**
 * Gets duplicates of a given type based on a guard (predicate). A given function is used for converting an instance of type T
 * to a string which is used for checking for duplicates.
 *
 * @param <T>
 *          the generic type
 * @param predicate
 *          the predicate acting as a guard
 * @param function
 *          returns a string for an instance of type T
 * @param elements
 *          the elements to be checked
 * @return the duplicates
 */
private <T extends EObject> Iterable<T> getDuplicates(final Predicate<T> predicate, final Function<T, String> function, final Iterable<T> elements) {
  List<T> result = Lists.newArrayList();
  Multimap<String, T> multiMap = Multimaps.newMultimap(Maps.<String, Collection<T>> newHashMap(), new Supplier<Collection<T>>() {
    @Override
    public Collection<T> get() {
      return Lists.<T> newArrayList();
    }
  });
  for (final T candidate : elements) {
    if (predicate.apply(candidate)) {
      multiMap.put(function.apply(candidate), candidate);
    }
  }
  for (String elem : multiMap.keySet()) {
    final Collection<T> duplicates = multiMap.get(elem);
    if (duplicates.size() > 1) {
      result.addAll(duplicates);
    }
  }

  return result;
}
 
Example 9  Project: molgenis   File: L2CacheRepositoryDecorator.java
private List<Entity> findAllCache(List<Object> ids, Fetch fetch) {
  if (TransactionSynchronizationManager.isCurrentTransactionReadOnly()) {
    List<Entity> unorderedEntities = l2Cache.getBatch(delegate(), ids, fetch);
    return createOrderedEntities(ids, unorderedEntities);
  } else {
    String entityTypeId = getEntityType().getId();
    Multimap<Boolean, Object> partitionedIds =
        Multimaps.index(
            ids, id -> transactionInformation.isEntityDirty(EntityKey.create(entityTypeId, id)));
    Collection<Object> cleanIds = partitionedIds.get(false);
    Collection<Object> dirtyIds = partitionedIds.get(true);

    List<Entity> batch = l2Cache.getBatch(delegate(), cleanIds, fetch);
    Map<Object, Entity> result = newHashMap(uniqueIndex(batch, Entity::getIdValue));
    result.putAll(
        delegate().findAll(dirtyIds.stream(), fetch).collect(toMap(Entity::getIdValue, e -> e)));

    return ids.stream().filter(result::containsKey).map(result::get).collect(toList());
  }
}
 
Example 10  Project: datawave   File: AbstractFunctionalQuery.java
protected void addMetadataEntries(Multimap<String,Key> metadataEntries) throws AccumuloSecurityException, AccumuloException, TableNotFoundException {
    MultiTableBatchWriter multiTableWriter = connector.createMultiTableBatchWriter(new BatchWriterConfig());
    BatchWriter writer = multiTableWriter.getBatchWriter(QueryTestTableHelper.METADATA_TABLE_NAME);
    for (String field : metadataEntries.keySet()) {
        Mutation mutation = new Mutation(new Text(field));
        for (Key key : metadataEntries.get(field)) {
            mutation.put(key.getColumnFamily(), key.getColumnQualifier(), key.getColumnVisibilityParsed(), new Value());
        }
        writer.addMutation(mutation);
    }
    writer.close();
    connector.tableOperations().compact(QueryTestTableHelper.METADATA_TABLE_NAME, new Text("\0"), new Text("~"), true, true);
}
 
Example 11  Project: uima-uimafit   File: EnhanceMojo.java
/**
 * Write a report on any meta data missing from components.
 */
private void writeMissingMetaDataReport(File aReportFile, Multimap<String, String> aReportData)
        throws MojoExecutionException {
  String[] classes = aReportData.keySet().toArray(new String[aReportData.keySet().size()]);
  Arrays.sort(classes);

  PrintWriter out = null;
  FileUtils.mkdir(aReportFile.getParent());
  try {
    out = new PrintWriter(new OutputStreamWriter(new FileOutputStream(aReportFile), encoding));

    if (classes.length > 0) {
      for (String clazz : classes) {
        out.printf("%s %s%n", MARK_CLASS, clazz);
        Collection<String> messages = aReportData.get(clazz);
        if (messages.isEmpty()) {
          out.printf("  No problems");
        } else {
          for (String message : messages) {
            out.printf("  %s%n", message);
          }
        }
        out.printf("%n");
      }
    } else {
      out.printf("%s%n", MARK_NO_MISSING_META_DATA);
    }
  } catch (IOException e) {
    throw new MojoExecutionException("Unable to write missing meta data report to ["
            + aReportFile + "]" + ExceptionUtils.getRootCauseMessage(e), e);
  } finally {
    IOUtils.closeQuietly(out);
  }
}
 
Example 12  Project: google-ads-java   File: GetAccountHierarchy.java
/**
 * Prints the specified account's hierarchy using recursion.
 *
 * @param customerClient the customer client whose info will be printed; its child accounts
 *     will also be processed if it is a manager account.
 * @param customerIdsToChildAccounts a map containing the account hierarchy information.
 * @param depth the current depth we are printing from in the account hierarchy.
 */
private void printAccountHierarchy(
    CustomerClient customerClient,
    Multimap<Long, CustomerClient> customerIdsToChildAccounts,
    int depth) {
  String leadingSpace = " ";
  if (depth == 0) {
    System.out.println("Customer ID (Descriptive Name, Currency Code, Time Zone");
    leadingSpace = "";
  } else {
    System.out.println("|");
  }
  System.out.print(Strings.repeat("-", depth * 2));
  long customerId = customerClient.getId().getValue();
  System.out.printf(
      leadingSpace + "%d ('%s', '%s', '%s')%n",
      customerId,
      customerClient.getDescriptiveName().getValue(),
      customerClient.getCurrencyCode().getValue(),
      customerClient.getTimeZone().getValue());

  // Recursively calls this function for all child accounts of customerClient if the current
  // customer is a manager account.
  for (CustomerClient childCustomer : customerIdsToChildAccounts.get(customerId)) {
    printAccountHierarchy(childCustomer, customerIdsToChildAccounts, depth + 1);
  }
}
 
Example 13  Project: brooklyn-server   File: BundleUpgradeParser.java
private static Multimap<VersionedName, VersionRangedName> parseUpgradeForTypesHeader(String input, Bundle bundle, Supplier<? extends Iterable<? extends RegisteredType>> typeSupplier, Multimap<VersionedName, VersionRangedName> upgradesForBundles) {
    List<String> sourceVersions = null;
    if (upgradesForBundles!=null) {
        Collection<VersionRangedName> acceptableRanges = upgradesForBundles.get(new VersionedName(bundle));
        if (acceptableRanges!=null && !acceptableRanges.isEmpty()) {
            for (VersionRangedName n: acceptableRanges) {
                if (n.getSymbolicName().equals(bundle.getSymbolicName())) {
                    if (sourceVersions==null) {
                        sourceVersions = MutableList.of();
                    }
                    sourceVersions.add(n.getOsgiVersionRange().toString());
                }
            }
        }
    }
    Set<VersionedName> typeSupplierNames = MutableList.copyOf(typeSupplier.get()).stream().map(
        (t) -> VersionedName.toOsgiVersionedName(t.getVersionedName())).collect(Collectors.toSet());
    if (input==null && sourceVersions!=null && !sourceVersions.isEmpty()) {
        input = "*";
    }
    return parseVersionRangedNameEqualsVersionedNameList(input, false,
        // wildcard means all types, all versions of this bundle this bundle replaces
        getTypeNamesInBundle(typeSupplier), sourceVersions,
        // default target is same type at version of this bundle
        (i) -> { 
            VersionedName targetTypeAtBundleVersion = new VersionedName(i.getSymbolicName(), bundle.getVersion());
            if (!typeSupplierNames.contains(VersionedName.toOsgiVersionedName(targetTypeAtBundleVersion))) {
                throw new IllegalStateException("Bundle manifest declares it upgrades "+i+" "
                    + "but does not declare an explicit target and does not contain inferred target "+targetTypeAtBundleVersion);
            }
            return targetTypeAtBundleVersion; 
        });
}
 
Example 14  Project: api-mining   File: APICallClustererMAPO.java
public static void main(final String[] args) throws Exception {

    final String dataset = "/afs/inf.ed.ac.uk/user/j/jfowkes/Code/Sequences/Datasets/API/srclibs/calls/netty.arff";
    final Multimap<Integer, String> assignments = clusterAPICallSeqs(dataset, 10);

    // Print assignments
    for (final int cluster : assignments.keySet()) {
        System.out.println("===== Cluster #" + cluster);
        for (final String seq : assignments.get(cluster))
            System.out.println("      " + seq);
    }
}
 
Example 15  Project: NOVA-Core   File: ASMHelper.java
public static byte[] injectMethods(String name, byte[] bytes, Multimap<String, MethodInjector> injectors) {
	if (injectors.containsKey(name)) {
		ClassNode cnode = createClassNode(bytes);

		for (MethodInjector injector : injectors.get(name)) {
			MethodNode method = findMethod(injector.method, cnode);
			if (method == null) {
				throw new RuntimeException("Method not found: " + injector.method);
			}
			Game.logger().info("Injecting into {}\n{}", injector.method, printInsnList(injector.injection));

			List<AbstractInsnNode> callNodes;
			if (injector.before) {
				callNodes = InstructionComparator.insnListFindStart(method.instructions, injector.needle);
			} else {
				callNodes = InstructionComparator.insnListFindEnd(method.instructions, injector.needle);
			}

			if (callNodes.size() == 0) {
				throw new RuntimeException("Needle not found in Haystack: " + injector.method + "\n" + printInsnList(injector.needle));
			}

			for (AbstractInsnNode node : callNodes) {
				if (injector.before) {
					Game.logger().info("Injected before: {}", printInsn(node));
					method.instructions.insertBefore(node, cloneInsnList(injector.injection));
				} else {
					Game.logger().info("Injected after: {}", printInsn(node));
					method.instructions.insert(node, cloneInsnList(injector.injection));
				}
			}
		}

		bytes = createBytes(cnode, ClassWriter.COMPUTE_FRAMES);
	}
	return bytes;
}
 
Example 16  Project: samza   File: ExecutionPlanner.java
/**
 * Groups streams participating in joins together.
 */
private static List<StreamSet> groupJoinedStreams(JobGraph jobGraph) {
  // Group input operator specs (input/intermediate streams) by the joins they participate in.
  Multimap<OperatorSpec, InputOperatorSpec> joinOpSpecToInputOpSpecs =
      OperatorSpecGraphAnalyzer.getJoinToInputOperatorSpecs(
          jobGraph.getApplicationDescriptorImpl().getInputOperators().values());

  Map<String, TableDescriptor> tableDescriptors = jobGraph.getTables().stream()
      .collect(Collectors.toMap(TableDescriptor::getTableId, Function.identity()));

  // Convert every group of input operator specs into a group of corresponding stream edges.
  List<StreamSet> streamSets = new ArrayList<>();
  for (OperatorSpec joinOpSpec : joinOpSpecToInputOpSpecs.keySet()) {
    Collection<InputOperatorSpec> joinedInputOpSpecs = joinOpSpecToInputOpSpecs.get(joinOpSpec);
    StreamSet streamSet = getStreamSet(joinOpSpec.getOpId(), joinedInputOpSpecs, jobGraph);

    // If current join is a stream-table join, add the stream edges corresponding to side-input
    // streams associated with the joined table (if any).
    if (joinOpSpec instanceof StreamTableJoinOperatorSpec) {
      StreamTableJoinOperatorSpec streamTableJoinOperatorSpec = (StreamTableJoinOperatorSpec) joinOpSpec;
      TableDescriptor tableDescriptor = tableDescriptors.get(streamTableJoinOperatorSpec.getTableId());
      if (tableDescriptor instanceof LocalTableDescriptor) {
        LocalTableDescriptor localTableDescriptor = (LocalTableDescriptor) tableDescriptor;
        Collection<String> sideInputs = ListUtils.emptyIfNull(localTableDescriptor.getSideInputs());
        Iterable<StreamEdge> sideInputStreams = sideInputs.stream().map(jobGraph::getStreamEdge)::iterator;
        Iterable<StreamEdge> streams = streamSet.getStreamEdges();
        streamSet = new StreamSet(streamSet.getSetId(), Iterables.concat(streams, sideInputStreams));
      }
    }

    streamSets.add(streamSet);
  }

  return Collections.unmodifiableList(streamSets);
}
 
Example 17  Project: tac-kbp-eal   File: ResponseMapping.java
public ResponseLinking apply(ResponseLinking responseLinking) {
  final Function<Response, Response> responseMapping = MapUtils.asFunction(replacedResponses,
      Functions.<Response>identity());
  final Predicate<Response> notDeleted = not(in(deletedResponses));

  final ImmutableSet.Builder<ResponseSet> newResponseSetsB = ImmutableSet.builder();
  final ImmutableMap.Builder<ResponseSet, ResponseSet> oldResponseSetToNewB = ImmutableMap.builder();
  for (final ResponseSet responseSet : responseLinking.responseSets()) {
    final ImmutableSet<Response> filteredResponses = FluentIterable.from(responseSet)
        .filter(notDeleted)
        .transform(responseMapping).toSet();
    if (!filteredResponses.isEmpty()) {
      final ResponseSet derived = ResponseSet.from(filteredResponses);
      newResponseSetsB.add(derived);
      oldResponseSetToNewB.put(responseSet, derived);
    }
  }

  final ImmutableSet<ResponseSet> newResponseSets = newResponseSetsB.build();
  final Predicate<Response> notLinked = not(in(ImmutableSet.copyOf(
      Iterables.concat(newResponseSets))));
  final ImmutableSet<Response> newIncompletes =
      FluentIterable.from(responseLinking.incompleteResponses())
          .filter(notDeleted)
          .transform(responseMapping)
          .filter(notLinked)
          .toSet();

  final Optional<ImmutableBiMap<String, ResponseSet>> newResponseSetIDMap;
  if (responseLinking.responseSetIds().isPresent()) {
    final BiMap<String, ResponseSet> responseSetIDs = responseLinking.responseSetIds().get();
    final ImmutableMap<ResponseSet, ResponseSet> oldResponseSetToNew = oldResponseSetToNewB.build();
    final Multimap<ResponseSet, ResponseSet> newResponseToOld = oldResponseSetToNew.asMultimap().inverse();
    final ImmutableBiMap.Builder<String, ResponseSet> newResponseSetIDMapB =
        ImmutableBiMap.builder();
    // since response sets may lose responses and no longer be unique, we choose the earliest alphabetical ID
    for(final ResponseSet nu: newResponseToOld.keySet()) {
      final ImmutableSet.Builder<String> candidateIDsB = ImmutableSet.builder();
      for(final ResponseSet old: newResponseToOld.get(nu)) {
        candidateIDsB.add(responseSetIDs.inverse().get(old));
      }
      final ImmutableSet<String> candidateIDs = candidateIDsB.build();
      final String newID = Ordering.natural().sortedCopy(candidateIDs).get(0);
      if(candidateIDs.size() > 1) {
        // only log if we're converting multiple sets.
        log.debug("Collapsing response sets {} to {}", candidateIDs, newID);
      }
      newResponseSetIDMapB.put(newID, nu);
    }
    newResponseSetIDMap = Optional.of(newResponseSetIDMapB.build());
  } else {
    newResponseSetIDMap = Optional.absent();
  }

  return ResponseLinking.builder().docID(responseLinking.docID())
      .responseSets(newResponseSets).incompleteResponses(newIncompletes)
      .responseSetIds(newResponseSetIDMap).build();
}
 
Example 18  Project: datawave   File: ProtobufEdgeDataTypeHandler.java
protected EdgeDataBundle createEdge(EdgeDefinition edgeDef, RawRecordContainer event, NormalizedContentInterface ifaceSource, String sourceGroup,
                String sourceSubGroup, NormalizedContentInterface ifaceSink, String sinkGroup, String sinkSubGroup, String edgeAttribute2,
                String edgeAttribute3, Multimap<String,NormalizedContentInterface> normalizedFields,
                Map<String,Multimap<String,NormalizedContentInterface>> depthFirstList, String loadDate, long activityDate, boolean validActivityDate) {
    // Get the edge value
    EdgeDataBundle edgeDataBundle = new EdgeDataBundle(edgeDef, ifaceSource, ifaceSink, event, this.getHelper(event.getDataType()));
    
    if (edgeAttribute2 != null) {
        edgeDataBundle.setEdgeAttribute2(edgeAttribute2);
    }
    if (edgeAttribute3 != null) {
        edgeDataBundle.setEdgeAttribute3(edgeAttribute3);
    }
    
    edgeDataBundle.setLoadDate(loadDate);
    edgeDataBundle.setActivityDate(activityDate);
    edgeDataBundle.setValidActivityDate(validActivityDate);
    
    // setup duration
    if (edgeDef.hasDuration()) {
        if (edgeDef.getUDDuration()) {
            NormalizedContentInterface upnci = getNullKeyedNCI(edgeDef.getUpTime(), normalizedFields);
            NormalizedContentInterface downnci = getNullKeyedNCI(edgeDef.getDownTime(), normalizedFields);
            if (null != upnci && null != downnci) {
                edgeDataBundle.initDuration(upnci, downnci);
            }
        } else {
            NormalizedContentInterface elnci = getNullKeyedNCI(edgeDef.getElapsedTime(), normalizedFields);
            if (null != elnci) {
                edgeDataBundle.initDuration(elnci);
            }
        }
    }
    
    String typeName = event.getDataType().typeName();
    
    // if the edgeDef is an enrichment definition, fill in the enrichedValue
    if (edgeDef.isEnrichmentEdge()) {
        if (edgeTypeLookup.containsKey(typeName)) {
            // if the group is the same as the sink or source,
            // then ensure we use the correct subgroup, otherwise we will enrich with the first value found
            Collection<NormalizedContentInterface> ifaceEnrichs = normalizedFields.get(edgeDef.getEnrichmentField());
            if (null != ifaceEnrichs && !ifaceEnrichs.isEmpty()) {
                String enrichGroup = getGroup(edgeDef.getEnrichmentField());
                if (enrichGroup != NO_GROUP) {
                    if (enrichGroup.equals(sourceGroup)) {
                        ifaceEnrichs = depthFirstList.get(edgeDef.getEnrichmentField()).get(sourceSubGroup);
                    } else if (enrichGroup.equals(sinkGroup)) {
                        ifaceEnrichs = depthFirstList.get(edgeDef.getEnrichmentField()).get(sinkSubGroup);
                    }
                }
                if (null != ifaceEnrichs && !ifaceEnrichs.isEmpty()) {
                    if (ifaceEnrichs.size() > 1) {
                        log.warn("The enrichment field for " + edgeDef.getEnrichmentField() + " contains multiple values...choosing first valid entry");
                        
                    }
                    for (NormalizedContentInterface ifaceEnrich : ifaceEnrichs) {
                        // the value of the enrichment field is a edge type lookup
                        String enrichedIndex = ifaceEnrich.getIndexedFieldValue();
                        // if we know this enrichment mode, then use it
                        if (edgeTypeLookup.get(typeName).containsKey(enrichedIndex)) {
                            edgeDataBundle.setEnrichedIndex(enrichedIndex); // required for eventMetadataRegistry
                            edgeDataBundle.setEnrichedValue(edgeTypeLookup.get(typeName).get(enrichedIndex));
                            break;
                        }
                    }
                }
                
            }
        } else {
            log.error("Cannot enrich edge because Enrichment Edge Types are not defined for data type: " + typeName);
            
        }
    }
    
    if (event.isRequiresMasking()) {
        maskEdge(edgeDataBundle, event);
    }
    // Is this an edge to delete?
    edgeDataBundle.setIsDeleting(this.getHelper(event.getDataType()).getDeleteMode());
    
    // Validate the edge value
    if (!edgeDataBundle.isValid()) {
        return null;
    }
    
    // Set edge type if this is an enrichment edge
    if (edgeDef.isEnrichmentEdge()) {
        if (null != edgeDataBundle.getEnrichedValue()) {
            edgeDataBundle.setEdgeType(edgeDataBundle.getEnrichedValue());
        }
    }
    
    // check value blacklist
    if (this.enableBlacklist && isBlacklistValue(typeName, edgeDataBundle.getSource().getValue(ValueType.INDEXED))
                    || isBlacklistValue(typeName, edgeDataBundle.getSink().getValue(ValueType.INDEXED))) {
        return null;
    }
    
    return edgeDataBundle;
}
 
Example 19  Project: RetroFacebook   File: CompilationErrorsTest.java
private void assertCompilationResultIs(
    Multimap<Diagnostic.Kind, Pattern> expectedDiagnostics,
    List<String> testSourceCode) throws IOException {
  assertFalse(testSourceCode.isEmpty());

  StringWriter compilerOut = new StringWriter();

  List<String> options = ImmutableList.of(
      "-sourcepath", tmpDir.getPath(),
      "-d", tmpDir.getPath(),
      "-processor", RetroFacebookProcessor.class.getName(),
      "-Xlint");
  javac.getTask(compilerOut, fileManager, diagnosticCollector, options, null, null);
  // This doesn't compile anything but communicates the paths to the JavaFileManager.

  // Convert the strings containing the source code of the test classes into files that we
  // can feed to the compiler.
  List<String> classNames = Lists.newArrayList();
  List<JavaFileObject> sourceFiles = Lists.newArrayList();
  for (String source : testSourceCode) {
    ClassName className = ClassName.extractFromSource(source);
    File dir = new File(tmpDir, className.sourceDirectoryName());
    dir.mkdirs();
    assertTrue(dir.isDirectory());  // True if we just made it, or it was already there.
    String sourceName = className.simpleName + ".java";
    Files.write(source, new File(dir, sourceName), Charsets.UTF_8);
    classNames.add(className.fullName());
    JavaFileObject sourceFile = fileManager.getJavaFileForInput(
        StandardLocation.SOURCE_PATH, className.fullName(), Kind.SOURCE);
    sourceFiles.add(sourceFile);
  }
  assertEquals(classNames.size(), sourceFiles.size());

  // Compile the classes.
  JavaCompiler.CompilationTask javacTask = javac.getTask(
      compilerOut, fileManager, diagnosticCollector, options, classNames, sourceFiles);
  boolean compiledOk = javacTask.call();

  // Check that there were no compilation errors unless we were expecting there to be.
  // We ignore "notes", typically debugging output from the annotation processor
  // when that is enabled.
  Multimap<Diagnostic.Kind, String> diagnostics = ArrayListMultimap.create();
  for (Diagnostic<?> diagnostic : diagnosticCollector.getDiagnostics()) {
    boolean ignore = (diagnostic.getKind() == Diagnostic.Kind.NOTE
        || (diagnostic.getKind() == Diagnostic.Kind.WARNING
            && diagnostic.getMessage(null).contains(
                "No processor claimed any of these annotations")));
    if (!ignore) {
      diagnostics.put(diagnostic.getKind(), diagnostic.getMessage(null));
    }
  }
  assertEquals(diagnostics.containsKey(Diagnostic.Kind.ERROR), !compiledOk);
  assertEquals("Diagnostic kinds should match: " + diagnostics,
      expectedDiagnostics.keySet(), diagnostics.keySet());
  for (Map.Entry<Diagnostic.Kind, Pattern> expectedDiagnostic : expectedDiagnostics.entries()) {
    Collection<String> actualDiagnostics = diagnostics.get(expectedDiagnostic.getKey());
    assertTrue("Diagnostics should contain " + expectedDiagnostic + ": " + diagnostics,
        Iterables.any(actualDiagnostics, Predicates.contains(expectedDiagnostic.getValue())));
  }
}
 
Example 20  Project: datawave   File: AggregatingReducer.java
/**
 * Can be used to execute this reduce process manually.
 *
 * @param entries a multimap of input keys to the values to reduce for each key
 * @param ctx the task context that receives the reduced output
 * @throws IOException if writing to the context fails
 * @throws InterruptedException if the task is interrupted
 */
public void reduce(Multimap<IK,IV> entries, TaskInputOutputContext<?,?,OK,OV> ctx) throws IOException, InterruptedException {
    for (IK key : entries.keySet()) {
        Collection<IV> values = entries.get(key);
        this.doReduce(key, values, ctx);
    }
}
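 
A pattern shared by most of the examples above is iterating keySet() and calling get(key) once per key. When each key's values are only needed once, Guava's asMap() view expresses the same traversal without the per-key lookup. A small sketch with invented data:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

import java.util.Collection;
import java.util.Map;

public class MultimapIterationDemo {
    public static void main(String[] args) {
        Multimap<String, String> multimap = HashMultimap.create();
        multimap.put("k1", "a");
        multimap.put("k1", "b");
        multimap.put("k2", "c");

        // The idiom used throughout the examples above: one get() per key.
        for (String key : multimap.keySet()) {
            Collection<String> values = multimap.get(key);
            System.out.println(key + ": " + values);
        }

        // Equivalent traversal via the asMap() view, without the per-key lookup.
        for (Map.Entry<String, Collection<String>> entry : multimap.asMap().entrySet()) {
            System.out.println(entry.getKey() + ": " + entry.getValue());
        }
    }
}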