com.google.common.base.Optional#equals() Source Code Examples

Listed below are real-world code examples of com.google.common.base.Optional#equals(); the full source of each example can be viewed on GitHub.
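Before the project examples, a quick reminder of the semantics: Guava's Optional#equals performs value-based comparison. Two Optionals are equal when both are absent, or when both are present and their contained values compare equal. A minimal, self-contained sketch (the class name Demo is ours, not from any project below):

import com.google.common.base.Optional;

public class Demo {
  public static void main(String[] args) {
    System.out.println(Optional.of("a").equals(Optional.of("a")));   // true: equal values
    System.out.println(Optional.of("a").equals(Optional.of("b")));   // false: different values
    System.out.println(Optional.absent().equals(Optional.absent())); // true: absent equals absent
    System.out.println(Optional.of("a").equals(Optional.absent()));  // false: present vs absent
  }
}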

Example 1  Project: api-mining  File: ProbabilityUtils.java
/**
 * Checks whether two conditional probability distributions are equivalent in the
 * maximum-likelihood view, i.e. they agree on the most likely element for every context.
 * 
 * @param cpd1 the first conditional probability distribution
 * @param cpd2 the second conditional probability distribution
 * @return true if both distributions have the same support and the same
 *         maximum-likelihood element in every context
 */
public static <A, B> boolean conditionalProbabiltiesEquivalentInML(
		final IDiscreteConditionalProbability<A, B> cpd1,
		final IDiscreteConditionalProbability<A, B> cpd2) {
	final Set<B> support1 = cpd1.getPossibleContexts();
	final Set<B> support2 = cpd2.getPossibleContexts();

	if (!support1.equals(support2)) {
		return false;
	}

	for (final B context : support1) {
		final Optional<A> ml1 = cpd1.getMaximumLikelihoodElement(context);
		final Optional<A> ml2 = cpd2.getMaximumLikelihoodElement(context);
		if (!ml1.equals(ml2)) {
			return false;
		}
	}
	return true;
}
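Note that the loop above leans on the absent case of Optional#equals: for a context where neither CPD has a maximum-likelihood element, ml1 and ml2 are both absent and compare equal, so such contexts do not count as differences.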
 
Example 2  Project: tassal  File: ProbabilityUtils.java
/**
 * Checks whether two conditional probability distributions are equivalent in the
 * maximum-likelihood view, i.e. they agree on the most likely element for every context.
 * 
 * @param cpd1 the first conditional probability distribution
 * @param cpd2 the second conditional probability distribution
 * @return true if both distributions have the same support and the same
 *         maximum-likelihood element in every context
 */
public static <A, B> boolean conditionalProbabiltiesEquivalentInML(
		final IDiscreteConditionalProbability<A, B> cpd1,
		final IDiscreteConditionalProbability<A, B> cpd2) {
	final Set<B> support1 = cpd1.getPossibleContexts();
	final Set<B> support2 = cpd2.getPossibleContexts();

	if (!support1.equals(support2)) {
		return false;
	}

	for (final B context : support1) {
		final Optional<A> ml1 = cpd1.getMaximumLikelihoodElement(context);
		final Optional<A> ml2 = cpd2.getMaximumLikelihoodElement(context);
		if (!ml1.equals(ml2)) {
			return false;
		}
	}
	return true;
}
 
Example 3  Project: bazel  File: PublicXmlResourceValue.java
@Override
public XmlResourceValue combineWith(XmlResourceValue value) {
  if (!(value instanceof PublicXmlResourceValue)) {
    throw new IllegalArgumentException(value + " is not combinable with " + this);
  }
  PublicXmlResourceValue other = (PublicXmlResourceValue) value;
  Map<ResourceType, Optional<Integer>> combined = new EnumMap<>(ResourceType.class);
  combined.putAll(typeToId);
  for (Map.Entry<ResourceType, Optional<Integer>> entry : other.typeToId.entrySet()) {
    Optional<Integer> existing = combined.get(entry.getKey());
    if (existing != null && !existing.equals(entry.getValue())) {
      throw new IllegalArgumentException(
          String.format(
              "Public resource of type %s assigned two different id values 0x%x and 0x%x",
              entry.getKey(), existing.orNull(), entry.getValue().orNull()));
    }
    combined.put(entry.getKey(), entry.getValue());
  }
  return of(combined);
}
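The null check plus Optional#equals combination above detects conflicting assignments while tolerating keys present in only one map. A simplified sketch of the same pattern (mergeIds and the String keys are our invention, not bazel's API):

import com.google.common.base.Optional;

import java.util.HashMap;
import java.util.Map;

public class MergeIds {
  // Hypothetical, simplified version of combineWith's conflict check.
  static Map<String, Optional<Integer>> mergeIds(
      Map<String, Optional<Integer>> left, Map<String, Optional<Integer>> right) {
    Map<String, Optional<Integer>> combined = new HashMap<>(left);
    for (Map.Entry<String, Optional<Integer>> entry : right.entrySet()) {
      Optional<Integer> existing = combined.get(entry.getKey());
      // existing is null (not absent) when the key is new; Optional#equals then
      // compares ids value-by-value for keys that appear on both sides.
      if (existing != null && !existing.equals(entry.getValue())) {
        throw new IllegalArgumentException("Conflicting ids for " + entry.getKey());
      }
      combined.put(entry.getKey(), entry.getValue());
    }
    return combined;
  }
}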
 
Example 4  Project: api-mining  File: ProbabilityUtils.java
/**
 * Prints the differences between the maximum-likelihood elements of two CPDs.
 * 
 * @param cpd1 the first conditional probability distribution
 * @param cpd2 the second conditional probability distribution
 */
public static <A, B> void printClusterDifferences(
		final IDiscreteConditionalProbability<A, B> cpd1,
		final IDiscreteConditionalProbability<A, B> cpd2) {
	final Set<B> support1 = cpd1.getPossibleContexts();

	for (final B context : support1) {
		final Optional<A> ml1 = cpd1.getMaximumLikelihoodElement(context);
		final Optional<A> ml2 = cpd2.getMaximumLikelihoodElement(context);
		if (!ml1.equals(ml2)) {
			System.out.println("Context " + context + ": " + ml1.orNull()
					+ " vs " + ml2.orNull());
		}
	}
}
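Since a difference may involve an absent maximum-likelihood element on either side, the printout uses orNull(), which renders an absent Optional as null instead of throwing the way get() would.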
 
Example 5  Project: dagger2-sample  File: BindingGraph.java
private Optional<RequestResolver> getOwningResolver(ProvisionBinding provisionBinding) {
  Optional<Equivalence.Wrapper<AnnotationMirror>> bindingScope =
      provisionBinding.wrappedScope();
  for (RequestResolver requestResolver : getResolverLineage()) {
    if (bindingScope.equals(requestResolver.targetScope)
        || requestResolver.explicitProvisionBindings.containsValue(provisionBinding)) {
      return Optional.of(requestResolver);
    }
  }
  return Optional.absent();
}
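Here equals is doing double duty: Optional#equals delegates to the contained Equivalence.Wrapper, whose own equals compares the wrapped AnnotationMirrors through the wrapping Equivalence. A minimal sketch of that layering, using plain strings in place of AnnotationMirrors:

import com.google.common.base.Equivalence;
import com.google.common.base.Optional;

public class WrappedEquals {
  public static void main(String[] args) {
    Equivalence<Object> eq = Equivalence.equals();
    Optional<Equivalence.Wrapper<String>> a = Optional.of(eq.wrap("@Singleton"));
    Optional<Equivalence.Wrapper<String>> b = Optional.of(eq.wrap("@Singleton"));
    // Optional#equals compares the wrappers; the wrappers compare the
    // wrapped values through the Equivalence. Prints true.
    System.out.println(a.equals(b));
  }
}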
 
Example 6  Project: tassal  File: ProbabilityUtils.java
/**
 * Prints the differences between the maximum-likelihood elements of two CPDs.
 * 
 * @param cpd1 the first conditional probability distribution
 * @param cpd2 the second conditional probability distribution
 */
public static <A, B> void printClusterDifferences(
		final IDiscreteConditionalProbability<A, B> cpd1,
		final IDiscreteConditionalProbability<A, B> cpd2) {
	final Set<B> support1 = cpd1.getPossibleContexts();

	for (final B context : support1) {
		final Optional<A> ml1 = cpd1.getMaximumLikelihoodElement(context);
		final Optional<A> ml2 = cpd2.getMaximumLikelihoodElement(context);
		if (!ml1.equals(ml2)) {
			System.out.println("Context " + context + ": " + ml1.orNull()
					+ " vs " + ml2.orNull());
		}
	}
}
 
Example 7  File: KafkaDeserializerExtractor.java
@VisibleForTesting
KafkaDeserializerExtractor(WorkUnitState state, Optional<Deserializers> deserializerType,
    Deserializer<?> kafkaDeserializer, KafkaSchemaRegistry<?, ?> kafkaSchemaRegistry) {
  super(state);
  this.kafkaDeserializer = kafkaDeserializer;
  this.kafkaSchemaRegistry = kafkaSchemaRegistry;
  this.latestSchema =
      (deserializerType.equals(Optional.of(Deserializers.CONFLUENT_AVRO))) ? (Schema) getSchema() : null;
}
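The ternary condition compares the Optional against Optional.of(a constant), a null-safe shorthand for "present and equal to that constant": it is false both when deserializerType is absent and when it holds a different value. For instance (the enum below is a stand-in for the real one):

import com.google.common.base.Optional;

public class PresentAndEqual {
  enum Deserializers { CONFLUENT_AVRO, GSON }  // stand-in for the project's enum

  public static void main(String[] args) {
    Optional<Deserializers> type = Optional.absent();
    // Equivalent to: type.isPresent() && type.get() == Deserializers.CONFLUENT_AVRO
    System.out.println(type.equals(Optional.of(Deserializers.CONFLUENT_AVRO))); // false
  }
}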
 
Example 8  Project: Baragon  File: IfEqualHelperSource.java
public static CharSequence ifOptionalEqual(Optional<String> v1, Optional<String> v2, Options options) throws IOException {
  if (v1.equals(v2)) {
    return options.fn();
  } else {
    return options.inverse();
  }
}
 
Example 9  Project: TakinRPC  File: BaseState.java
@VisibleForTesting
boolean shouldVoteFor(@Nonnull RaftLog log, @Nonnull RequestVote request) {

    Optional<Replica> lastVotedFor = log.lastVotedFor();
    Replica candidate = Replica.fromString(request.getCandidateId());

    boolean hasAtLeastTerm = request.getLastLogTerm() >= log.lastLogTerm();
    boolean hasAtLeastIndex = request.getLastLogIndex() >= log.lastLogIndex();

    boolean logAsComplete = (hasAtLeastTerm && hasAtLeastIndex);

    boolean alreadyVotedForCandidate = lastVotedFor.equals(Optional.of(candidate));
    boolean notYetVoted = !lastVotedFor.isPresent();

    return (alreadyVotedForCandidate && logAsComplete)
            || (notYetVoted && logAsComplete)
            || (request.getLastLogTerm() > log.lastLogTerm())
            || (hasAtLeastTerm && (request.getLastLogIndex() > log.lastLogIndex()))
            || logAsComplete;

}
 
Example 10  Project: dremio-oss  File: ScanWithHiveReader.java
private static RecordReader getRecordReader(HiveSplitXattr splitXattr, HiveTableXattr tableXattr,
                                            OperatorContext context, HiveConf hiveConf,
                                            SplitAndPartitionInfo split, CompositeReaderConfig compositeReader,
                                            HiveProxyingSubScan config, UserGroupInformation readerUgi)
  throws Exception {

  final JobConf baseJobConf = new JobConf(hiveConf);
  final Properties tableProperties = new Properties();
  addProperties(baseJobConf, tableProperties, HiveReaderProtoUtil.getTableProperties(tableXattr));

  final boolean isTransactional = AcidUtils.isTablePropertyTransactional(baseJobConf);
  final boolean isPartitioned = config.getPartitionColumns() != null && config.getPartitionColumns().size() > 0;
  final Optional<String> tableInputFormat = HiveReaderProtoUtil.getTableInputFormat(tableXattr);
  final JobConf jobConf = new JobConf(baseJobConf);

  final SerDe tableSerDe = createSerDe(jobConf, HiveReaderProtoUtil.getTableSerializationLib(tableXattr).get(),
      tableProperties);
  final StructObjectInspector tableOI = getStructOI(tableSerDe);
  final SerDe partitionSerDe;
  final StructObjectInspector partitionOI;

  boolean hasDeltas = false;
  if (isTransactional) {
    InputSplit inputSplit = HiveUtilities.deserializeInputSplit(splitXattr.getInputSplit());
    if (inputSplit instanceof OrcSplit) {
      hasDeltas = hasDeltas((OrcSplit) inputSplit);
    }
  }

  final Class<? extends HiveAbstractReader> tableReaderClass =
    getNativeReaderClass(tableInputFormat, context.getOptions(), hiveConf, false, isTransactional && hasDeltas);

  final Constructor<? extends HiveAbstractReader> tableReaderCtor = getNativeReaderCtor(tableReaderClass);

  Constructor<? extends HiveAbstractReader> readerCtor = tableReaderCtor;
  // It is possible for a partition to have a different input format than the table's input format.
  if (isPartitioned) {
    final List<Prop> partitionPropertiesList;
    final Properties partitionProperties = new Properties();
    final Optional<String> partitionInputFormat;
    final Optional<String> partitionStorageHandlerName;
    // First add table properties and then add partition properties. Partition properties override table properties.
    addProperties(jobConf, partitionProperties, HiveReaderProtoUtil.getTableProperties(tableXattr));

    // If Partition Properties are stored in DatasetMetadata (Pre 3.2.0)
    if (HiveReaderProtoUtil.isPreDremioVersion3dot2dot0LegacyFormat(tableXattr)) {
      logger.debug("Reading partition properties from DatasetMetadata");
      partitionPropertiesList = HiveReaderProtoUtil.getPartitionProperties(tableXattr, splitXattr.getPartitionId());
      addProperties(jobConf, partitionProperties, partitionPropertiesList);
      partitionSerDe =
        createSerDe(jobConf,
          HiveReaderProtoUtil.getPartitionSerializationLib(tableXattr, splitXattr.getPartitionId()).get(),
          partitionProperties
        );
      partitionInputFormat = HiveReaderProtoUtil.getPartitionInputFormat(tableXattr, splitXattr.getPartitionId());
      partitionStorageHandlerName = HiveReaderProtoUtil.getPartitionStorageHandler(tableXattr, splitXattr.getPartitionId());

    } else {
      logger.debug("Reading partition properties from PartitionChunk");
      final PartitionXattr partitionXattr = HiveReaderProtoUtil.getPartitionXattr(split);
      partitionPropertiesList = HiveReaderProtoUtil.getPartitionProperties(tableXattr, partitionXattr);
      addProperties(jobConf, partitionProperties, partitionPropertiesList);
      partitionSerDe =
        createSerDe(jobConf,
          HiveReaderProtoUtil.getPartitionSerializationLib(tableXattr, partitionXattr),
          partitionProperties
        );
      partitionInputFormat = HiveReaderProtoUtil.getPartitionInputFormat(tableXattr, partitionXattr);
      partitionStorageHandlerName = HiveReaderProtoUtil.getPartitionStorageHandler(tableXattr, partitionXattr);
    }

    jobConf.setInputFormat(getInputFormatClass(jobConf, partitionInputFormat, partitionStorageHandlerName));
    partitionOI = getStructOI(partitionSerDe);

    final boolean mixedSchema = !tableOI.equals(partitionOI);
    if (!partitionInputFormat.equals(tableInputFormat) || mixedSchema || isTransactional && hasDeltas) {
      final Class<? extends HiveAbstractReader> partitionReaderClass = getNativeReaderClass(
        partitionInputFormat, context.getOptions(), jobConf, mixedSchema, isTransactional);
      readerCtor = getNativeReaderCtor(partitionReaderClass);
    }
  } else {
    partitionSerDe = null;
    partitionOI = null;
    jobConf.setInputFormat(getInputFormatClass(jobConf, tableInputFormat, HiveReaderProtoUtil.getTableStorageHandler(tableXattr)));
  }

  return readerCtor.newInstance(tableXattr, split,
      compositeReader.getInnerColumns(), context, jobConf, tableSerDe, tableOI, partitionSerDe,
      partitionOI, config.getFilter(), config.getReferencedTables(), readerUgi);
}
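Two Optionals are also compared directly in !partitionInputFormat.equals(tableInputFormat): the formats differ when exactly one is absent, or when both are present but hold different class names, which is exactly the condition under which a partition-specific reader must be constructed.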
 
Example 11  Project: dremio-oss  File: ScanWithHiveReader.java
private static RecordReader getRecordReader(HiveSplitXattr splitXattr, HiveTableXattr tableXattr,
                                            OperatorContext context, HiveConf hiveConf,
                                            SplitAndPartitionInfo split, CompositeReaderConfig compositeReader,
                                            HiveProxyingSubScan config, UserGroupInformation readerUgi)
  throws Exception {

  final JobConf baseJobConf = new JobConf(hiveConf);
  final Properties tableProperties = new Properties();
  addProperties(baseJobConf, tableProperties, HiveReaderProtoUtil.getTableProperties(tableXattr));

  final boolean isTransactional = AcidUtils.isTablePropertyTransactional(baseJobConf);
  final boolean isPartitioned = config.getPartitionColumns() != null && config.getPartitionColumns().size() > 0;
  final Optional<String> tableInputFormat = HiveReaderProtoUtil.getTableInputFormat(tableXattr);
  final JobConf jobConf = new JobConf(baseJobConf);

  final AbstractSerDe tableSerDe = createSerDe(jobConf, HiveReaderProtoUtil.getTableSerializationLib(tableXattr).get(),
      tableProperties);
  final StructObjectInspector tableOI = getStructOI(tableSerDe);
  final AbstractSerDe partitionSerDe;
  final StructObjectInspector partitionOI;

  boolean hasDeltas = false;
  if (isTransactional) {
    InputSplit inputSplit = HiveUtilities.deserializeInputSplit(splitXattr.getInputSplit());
    if (inputSplit instanceof OrcSplit) {
      hasDeltas = hasDeltas((OrcSplit) inputSplit);
    }
  }

  final Class<? extends HiveAbstractReader> tableReaderClass =
    getNativeReaderClass(tableInputFormat, context.getOptions(), hiveConf, false, isTransactional && hasDeltas);

  final Constructor<? extends HiveAbstractReader> tableReaderCtor = getNativeReaderCtor(tableReaderClass);

  Constructor<? extends HiveAbstractReader> readerCtor = tableReaderCtor;
  // It is possible for a partition to have a different input format than the table's input format.
  if (isPartitioned) {
    final List<Prop> partitionPropertiesList;
    final Properties partitionProperties = new Properties();
    final Optional<String> partitionInputFormat;
    final Optional<String> partitionStorageHandlerName;
    // First add table properties and then add partition properties. Partition properties override table properties.
    addProperties(jobConf, partitionProperties, HiveReaderProtoUtil.getTableProperties(tableXattr));

    // If Partition Properties are stored in DatasetMetadata (Pre 3.2.0)
    if (HiveReaderProtoUtil.isPreDremioVersion3dot2dot0LegacyFormat(tableXattr)) {
      logger.debug("Reading partition properties from DatasetMetadata");
      partitionPropertiesList = HiveReaderProtoUtil.getPartitionProperties(tableXattr, splitXattr.getPartitionId());
      addProperties(jobConf, partitionProperties, partitionPropertiesList);
      partitionSerDe =
        createSerDe(jobConf,
          HiveReaderProtoUtil.getPartitionSerializationLib(tableXattr, splitXattr.getPartitionId()).get(),
          partitionProperties
        );
      partitionInputFormat = HiveReaderProtoUtil.getPartitionInputFormat(tableXattr, splitXattr.getPartitionId());
      partitionStorageHandlerName = HiveReaderProtoUtil.getPartitionStorageHandler(tableXattr, splitXattr.getPartitionId());

    } else {
      logger.debug("Reading partition properties from PartitionChunk");
      final PartitionXattr partitionXattr = HiveReaderProtoUtil.getPartitionXattr(split);
      partitionPropertiesList = HiveReaderProtoUtil.getPartitionProperties(tableXattr, partitionXattr);
      addProperties(jobConf, partitionProperties, partitionPropertiesList);
      partitionSerDe =
        createSerDe(jobConf,
          HiveReaderProtoUtil.getPartitionSerializationLib(tableXattr, partitionXattr),
          partitionProperties
        );
      partitionInputFormat = HiveReaderProtoUtil.getPartitionInputFormat(tableXattr, partitionXattr);
      partitionStorageHandlerName = HiveReaderProtoUtil.getPartitionStorageHandler(tableXattr, partitionXattr);
    }

    jobConf.setInputFormat(getInputFormatClass(jobConf, partitionInputFormat, partitionStorageHandlerName));
    partitionOI = getStructOI(partitionSerDe);

    final boolean mixedSchema = !tableOI.equals(partitionOI);
    if (!partitionInputFormat.equals(tableInputFormat) || mixedSchema || isTransactional && hasDeltas) {
      final Class<? extends HiveAbstractReader> partitionReaderClass = getNativeReaderClass(
        partitionInputFormat, context.getOptions(), jobConf, mixedSchema, isTransactional);
      readerCtor = getNativeReaderCtor(partitionReaderClass);
    }
  } else {
    partitionSerDe = null;
    partitionOI = null;
    jobConf.setInputFormat(getInputFormatClass(jobConf, tableInputFormat, HiveReaderProtoUtil.getTableStorageHandler(tableXattr)));
  }

  return readerCtor.newInstance(tableXattr, split,
      compositeReader.getInnerColumns(), context, jobConf, tableSerDe, tableOI, partitionSerDe,
      partitionOI, config.getFilter(), config.getReferencedTables(), readerUgi);
}
 
Example 12  Project: dagger2-sample  File: BindingGraphValidator.java
/**
 * Validates that the scope (if any) of this component is compatible with the scopes of the
 * bindings available in this component.
 */
void validateComponentScope(final BindingGraph subject,
    final ValidationReport.Builder<BindingGraph> reportBuilder,
    ImmutableMap<BindingKey, ResolvedBindings> resolvedBindings) {
  Optional<Equivalence.Wrapper<AnnotationMirror>> componentScope =
      subject.componentDescriptor().wrappedScope();
  ImmutableSet.Builder<String> incompatiblyScopedMethodsBuilder = ImmutableSet.builder();
  for (ResolvedBindings bindings : resolvedBindings.values()) {
    if (bindings.bindingKey().kind().equals(BindingKey.Kind.CONTRIBUTION)) {
      for (ContributionBinding contributionBinding : bindings.ownedContributionBindings()) {
        if (contributionBinding instanceof ProvisionBinding) {
          ProvisionBinding provisionBinding = (ProvisionBinding) contributionBinding;
          if (provisionBinding.scope().isPresent()
              && !componentScope.equals(provisionBinding.wrappedScope())) {
            // Scoped components cannot reference bindings to @Provides methods or @Inject
            // types decorated by a different scope annotation. Unscoped components cannot
            // reference to scoped @Provides methods or @Inject types decorated by any
            // scope annotation.
            switch (provisionBinding.bindingKind()) {
              case PROVISION:
                ExecutableElement provisionMethod =
                    MoreElements.asExecutable(provisionBinding.bindingElement());
                incompatiblyScopedMethodsBuilder.add(
                    methodSignatureFormatter.format(provisionMethod));
                break;
              case INJECTION:
                incompatiblyScopedMethodsBuilder.add(stripCommonTypePrefixes(
                    provisionBinding.scope().get().toString()) + " class "
                        + provisionBinding.bindingTypeElement().getQualifiedName());
                break;
              default:
                throw new IllegalStateException();
            }
          }
        }
      }
    }
  }
  ImmutableSet<String> incompatiblyScopedMethods = incompatiblyScopedMethodsBuilder.build();
  if (!incompatiblyScopedMethods.isEmpty()) {
    TypeElement componentType = subject.componentDescriptor().componentDefinitionType();
    StringBuilder message = new StringBuilder(componentType.getQualifiedName());
    if (componentScope.isPresent()) {
      message.append(" scoped with ");
      message.append(stripCommonTypePrefixes(ErrorMessages.format(componentScope.get().get())));
      message.append(" may not reference bindings with different scopes:\n");
    } else {
      message.append(" (unscoped) may not reference scoped bindings:\n");
    }
    for (String method : incompatiblyScopedMethods) {
      message.append(ErrorMessages.INDENT).append(method).append("\n");
    }
    reportBuilder.addItem(message.toString(), componentType,
        subject.componentDescriptor().componentAnnotation());
  }
}