下面列出了 com.google.common.base.Verify#verifyNotNull() 的实例代码；您也可以点击链接到 GitHub 查看源代码，或在右侧发表评论。
/**
 * Looks up the {@link TyVarInfo} for a type variable by consulting the type
 * parameters of its owner (a method or a class).
 */
TyVarInfo getTyVarInfo(TyVarSymbol tyVar) {
  Symbol owner = tyVar.owner();
  Verify.verifyNotNull(owner); // TODO(cushon): capture variables
  final ImmutableMap<TyVarSymbol, TyVarInfo> typeParameters;
  switch (owner.symKind()) {
    case METHOD:
      typeParameters = getMethodInfo((MethodSymbol) owner).tyParams();
      break;
    case CLASS:
      typeParameters = getSymbol((ClassSymbol) owner).typeParameterTypes();
      break;
    default:
      // Type variables can only be owned by methods or classes.
      throw new AssertionError(owner.symKind());
  }
  return typeParameters.get(tyVar);
}
/**
 * Returns the set of commits between two references.
 *
 * @param repoPath path to local git repository.
 * @param startRef start reference.
 * @param endRef end reference.
 * @return a set of commits, in log order.
 * @throws IOException if I/O error occurs.
 * @throws GitAPIException if an error occurs when accessing Git API.
 */
private static Set<RevCommit> getCommitsBetweenReferences(String repoPath, String startRef,
        String endRef) throws IOException, GitAPIException {
    final FileRepositoryBuilder builder = new FileRepositoryBuilder();
    final Path path = Paths.get(repoPath);
    // Repository and Git are AutoCloseable; previously they were never closed,
    // leaking pack-file handles. Close them once the commits are materialized.
    try (Repository repo = builder.findGitDir(path.toFile()).readEnvironment().build();
            Git git = new Git(repo)) {
        final ObjectId startCommit = getActualRefObjectId(repo, startRef);
        Verify.verifyNotNull(startCommit, "Start reference \"" + startRef + "\" is invalid!");
        final ObjectId endCommit = getActualRefObjectId(repo, endRef);
        // Fix: the end reference was not validated before, which produced an
        // unhelpful NPE from inside JGit instead of a clear error message.
        Verify.verifyNotNull(endCommit, "End reference \"" + endRef + "\" is invalid!");
        final Iterable<RevCommit> commits = git.log().addRange(startCommit, endCommit).call();
        // Materialize the lazy iterable before the repository is closed.
        return Sets.newLinkedHashSet(commits);
    }
}
/**
 * Finds the given environment in the given set and returns the default environments for its
 * group.
 */
private static Collection<EnvironmentWithGroup> getDefaults(Label env,
    EnvironmentCollection allEnvironments) {
  // Locate the group whose defaults contain this environment.
  EnvironmentLabels owningGroup = allEnvironments.getGroups().stream()
      .filter(candidate -> candidate.getDefaults().contains(env))
      .findFirst()
      .orElse(null);
  Verify.verifyNotNull(owningGroup);
  ImmutableSet.Builder<EnvironmentWithGroup> defaults = ImmutableSet.builder();
  for (Label defaultEnv : owningGroup.getDefaults()) {
    defaults.add(EnvironmentWithGroup.create(defaultEnv, owningGroup));
  }
  return defaults.build();
}
/**
 * Creates a client {@link Bootstrap} bound to the shared FTDC event loop group,
 * using NIO sockets with TCP keep-alive disabled.
 *
 * @return a configured, not-yet-connected Bootstrap
 */
private static Bootstrap initBootStrap() {
    // Note: the previous Verify.verifyNotNull(cb) directly after `new Bootstrap()`
    // was a no-op — a constructor can never yield null — so it has been removed.
    Bootstrap bootstrap = new Bootstrap();
    bootstrap.group(ApplicationRuntime.FTDC_LOOP_GROUP);
    bootstrap.channel(NioSocketChannel.class);
    bootstrap.option(ChannelOption.SO_KEEPALIVE, false);
    return bootstrap;
}
/**
 * Handles an error response: resolves and unbinds the request identity that
 * originated the request, then delegates the error to {@link #doRspError}.
 *
 * @param error the error payload received from the counterparty
 * @param reqId the request id this error refers to
 */
@Override
public void onRspError(RspError error, int reqId) {
    UserSession session = session();
    if (session == null) {
        // Without a session we cannot resolve the request identity; log and drop.
        logger.error(LOGGR_TEMPLATE_ERROR, reqId);
        return;
    }
    RequestIdentity requestIdentity = ApplicationRuntime.unbindRequestIdentity(session.getBrokerId(),
            session.getUserID(), reqId);
    // Fix: the (possibly null) requestIdentity itself was previously passed as the
    // message argument, so a failed check always printed "null". Report reqId instead.
    Verify.verifyNotNull(requestIdentity, VERIFY_TEMPLATE, reqId);
    doRspError(requestIdentity, error);
}
/**
 * Walks up the statement hierarchy until an {@code augment} statement is found.
 *
 * @param child the statement whose enclosing augmentation is sought
 * @return the nearest ancestor whose public definition is {@link YangStmtMapping#AUGMENT}
 * @throws VerifyException if the root is reached without finding an augmentation
 */
private static StmtContext<?, ?, ?> getParentAugmentation(final StmtContext<?, ?, ?> child) {
    // Fix: error message said "has not parent"; corrected to "has no parent".
    StmtContext<?, ?, ?> parent = Verify.verifyNotNull(child.getParentContext(), "Child %s has no parent", child);
    while (parent.getPublicDefinition() != YangStmtMapping.AUGMENT) {
        parent = Verify.verifyNotNull(parent.getParentContext(), "Failed to find augmentation parent of %s", child);
    }
    return parent;
}
/**
 * Retrieves the {@link FtdcTraderSpi} previously registered as an attribute on the
 * given channel. Fails if the channel is null or no SPI was registered.
 */
private FtdcTraderSpi getSpi(Channel ftdcChannel) {
    Verify.verifyNotNull(ftdcChannel, "ftdcChannel is null");
    FtdcTraderSpi spi = null;
    if (ftdcChannel.hasAttr(FtdcTraderSpi.TRADER_API)) {
        spi = ftdcChannel.attr(FtdcTraderSpi.TRADER_API).get();
    }
    Verify.verifyNotNull(spi, "FtdcTraderSpi not register, pls register it first");
    return spi;
}
/**
 * Returns the line number for the given character offset, as reported by the
 * compilation unit's line map.
 *
 * <p>Requires {@code unit} to have been set beforehand.
 */
@Override
public int getLineNumber(int inputPosition) {
  Verify.verifyNotNull(unit, "Expected compilation unit to be set.");
  return unit.getLineMap().getLineNumber(inputPosition);
}
/**
 * Returns the column number for the given character offset, as reported by the
 * compilation unit's line map.
 *
 * <p>Requires {@code unit} to have been set beforehand.
 */
@Override
public int getColumnNumber(int inputPosition) {
  Verify.verifyNotNull(unit, "Expected compilation unit to be set.");
  return unit.getLineMap().getColumnNumber(inputPosition);
}
/**
 * Returns the expected environments that the given top-level target doesn't support.
 *
 * @param topLevelTarget the top-level target to check
 * @param expectedEnvironmentLabels the environments this target is expected to support
 *
 * @throws InterruptedException if environment target resolution fails
 * @throws ViewCreationFailedException if an expected environment isn't a valid target
 */
private Collection<MissingEnvironment> getMissingEnvironments(ConfiguredTarget topLevelTarget,
    Collection<Label> expectedEnvironmentLabels)
    throws InterruptedException, ViewCreationFailedException {
  // Nothing expected means nothing can be missing.
  if (expectedEnvironmentLabels.isEmpty()) {
    return ImmutableList.of();
  }
  // Convert expected environment labels to actual environments.
  EnvironmentCollection.Builder expectedEnvironmentsBuilder = new EnvironmentCollection.Builder();
  for (Label envLabel : expectedEnvironmentLabels) {
    try {
      Target env = packageManager.getTarget(eventHandler, envLabel);
      expectedEnvironmentsBuilder.put(
          ConstraintSemantics.getEnvironmentGroup(env).getEnvironmentLabels(), envLabel);
    } catch (NoSuchPackageException | NoSuchTargetException
        | ConstraintSemantics.EnvironmentLookupException e) {
      // An expected environment that doesn't resolve to a valid target is a hard error.
      throw new ViewCreationFailedException("invalid target environment", e);
    }
  }
  EnvironmentCollection expectedEnvironments = expectedEnvironmentsBuilder.build();
  // Dereference any aliases that might be present.
  topLevelTarget = topLevelTarget.getActual();
  // Now check the target against expected environments. Output files are checked
  // through their generating rule, which carries the environment provider.
  TransitiveInfoCollection asProvider;
  if (topLevelTarget instanceof OutputFileConfiguredTarget) {
    asProvider = ((OutputFileConfiguredTarget) topLevelTarget).getGeneratingRule();
  } else {
    asProvider = topLevelTarget;
  }
  SupportedEnvironmentsProvider provider =
      Verify.verifyNotNull(asProvider.getProvider(SupportedEnvironmentsProvider.class));
  return RuleContextConstraintSemantics.getUnsupportedEnvironments(
      provider.getRefinedEnvironments(), expectedEnvironments)
      .stream()
      // We apply this filter because the target might also not support default environments in
      // other environment groups. We don't care about those. We only care about the environments
      // explicitly referenced.
      .filter(Predicates.in(expectedEnvironmentLabels))
      .map(
          environment ->
              new MissingEnvironment(
                  environment, provider.getRemovedEnvironmentCulprit(environment)))
      .collect(Collectors.toSet());
}
/**
 * Helper method for {@link #checkConstraints}: refines a rule's environments with the given dep.
 *
 * <p>A rule's <b>complete</b> refined set applies this process to every dep.
 *
 * @param ruleContext the rule being refined
 * @param staticEnvironments the rule's statically declared environments
 * @param dep the dep whose refined environments are intersected in
 * @param labelsToEnvironments mutable index from environment label to its group entry;
 *     updated in place when group defaults are expanded
 * @param refinedEnvironmentsSoFar mutable refined set; pruned (and partially re-filled)
 *     in place by this call
 * @param groupsWithEnvironmentsRemoved mutable record of groups that lost environments
 * @param removedEnvironmentCulprits mutable record of which dep caused each removal
 */
private static void refineEnvironmentsForDep(
    RuleContext ruleContext,
    EnvironmentCollection staticEnvironments,
    TransitiveInfoCollection dep,
    Map<Label, EnvironmentWithGroup> labelsToEnvironments,
    Set<EnvironmentWithGroup> refinedEnvironmentsSoFar,
    Set<EnvironmentLabels> groupsWithEnvironmentsRemoved,
    Map<Label, RemovedEnvironmentCulprit> removedEnvironmentCulprits) {
  SupportedEnvironmentsProvider depEnvironments =
      dep.getProvider(SupportedEnvironmentsProvider.class);
  // Stores the environments that are pruned from the refined set because of this dep. Even
  // though they're removed, some subset of the environments they fulfill may belong in the
  // refined set. For example, if environment "both" fulfills "a" and "b" and "lib" statically
  // sets restricted_to = ["both"] and "dep" sets restricted_to = ["a"], then lib's refined set
  // excludes "both". But rather than be emptied out it can be reduced to "a".
  Set<Label> prunedEnvironmentsFromThisDep = new LinkedHashSet<>();
  // Refine this rule's environments by intersecting with the dep's refined environments:
  for (Label refinedEnvironmentToPrune : getUnsupportedEnvironments(
      depEnvironments.getRefinedEnvironments(), staticEnvironments)) {
    EnvironmentWithGroup envToPrune = labelsToEnvironments.get(refinedEnvironmentToPrune);
    if (envToPrune == null) {
      // If we have no record of this environment, that means the current rule implicitly uses
      // the defaults for this group. So explicitly opt that group's defaults into the refined
      // set before trying to remove specific items.
      for (EnvironmentWithGroup defaultEnv :
          getDefaults(refinedEnvironmentToPrune, depEnvironments.getRefinedEnvironments())) {
        refinedEnvironmentsSoFar.add(defaultEnv);
        labelsToEnvironments.put(defaultEnv.environment(), defaultEnv);
      }
      // After defaults were opted in above, the environment must now be indexed.
      envToPrune = Verify.verifyNotNull(labelsToEnvironments.get(refinedEnvironmentToPrune));
    }
    refinedEnvironmentsSoFar.remove(envToPrune);
    groupsWithEnvironmentsRemoved.add(envToPrune.group());
    removedEnvironmentCulprits.put(envToPrune.environment(),
        findOriginalRefiner(ruleContext, dep.getLabel(), depEnvironments, envToPrune));
    prunedEnvironmentsFromThisDep.add(envToPrune.environment());
  }
  // Add in any dep environment that one of the environments we removed fulfills. In other
  // words, the removed environment is no good, but some subset of it may be.
  for (EnvironmentWithGroup depEnv :
      depEnvironments.getRefinedEnvironments().getGroupedEnvironments()) {
    for (Label fulfiller : depEnv.group().getFulfillers(depEnv.environment()).toList()) {
      if (prunedEnvironmentsFromThisDep.contains(fulfiller)) {
        refinedEnvironmentsSoFar.add(depEnv);
      }
    }
  }
}
/**
 * Authenticates and logs in: acquires a pooled connection to the registered front
 * address and attaches an {@link AuthListener} that drives the auth/login sequence.
 */
@Override
public void reqAuthAndLogin(RequestIdentity requestIdentity, FtdcReq userLogin) {
    // A front address must have been registered before any login attempt.
    Verify.verifyNotNull(sas, "pls register front first");
    AuthListener listener = new AuthListener(requestIdentity, TID.Auth, Sequence.Auth, userLogin);
    FtdClientPool.getPool().acquire(sas).addListener(listener);
}
/**
 * Unbinds the given config key set.
 *
 * <p>Removes the entry for {@code allKeys} from {@code bindings}; fails with a
 * {@code VerifyException} if no such binding existed, i.e. the key set must have
 * been bound previously.
 */
public void unbind(Set<Label> allKeys) {
  Verify.verifyNotNull(bindings.remove(allKeys));
}
/**
 * Appends one data row.
 *
 * @param row cell values; must contain exactly as many entries as the header
 * @return this builder, for chaining
 */
public RelationalInputBuilder addRow(String... row) {
  Verify.verifyNotNull(header, "set header first!");
  // Improvement: the width check previously failed without any message; include
  // both widths so a mismatched row is easy to diagnose.
  Verify.verify(row.length == header.length,
      "row has %s cells but header has %s columns", row.length, header.length);
  data.add(row);
  return this;
}
/**
 * Returns the line number for the given character offset, as reported by the
 * compilation unit's line map.
 *
 * <p>Requires {@code unit} to have been set beforehand.
 */
@Override
public int getLineNumber(int inputPosition) {
  Verify.verifyNotNull(unit, "Expected compilation unit to be set.");
  return unit.getLineMap().getLineNumber(inputPosition);
}
/**
 * Applies this OptionsPolicyEnforcer's policy to the given OptionsParser.
 *
 * <p>A no-op when no invocation policy is configured. Policies naming flags the
 * parser does not know about are skipped (logged, not failed) for forward
 * compatibility with newer policy files.
 *
 * @param parser The OptionsParser to enforce policy on.
 * @param command The current blaze command, for flag policies that apply to only specific
 *     commands. Such policies will be enforced only if they contain this command or a command
 *     they inherit from
 * @throws OptionsParsingException if any flag policy is invalid.
 */
public void enforce(OptionsParser parser, @Nullable String command)
    throws OptionsParsingException {
  if (invocationPolicy == null || invocationPolicy.getFlagPoliciesCount() == 0) {
    return;
  }
  // The effective policy returned is expanded, filtered for applicable commands, and cleaned of
  // redundancies and conflicts.
  List<FlagPolicyWithContext> effectivePolicies =
      getEffectivePolicies(invocationPolicy, parser, command, loglevel);
  for (FlagPolicyWithContext flagPolicy : effectivePolicies) {
    String flagName = flagPolicy.policy.getFlagName();
    OptionValueDescription valueDescription;
    try {
      valueDescription = parser.getOptionValueDescription(flagName);
    } catch (IllegalArgumentException e) {
      // This flag doesn't exist. We are deliberately lenient if the flag policy has a flag
      // we don't know about. This is for better future proofing so that as new flags are added,
      // new policies can use the new flags without worrying about older versions of Bazel.
      logger.at(loglevel).log(
          "Flag '%s' specified by invocation policy does not exist", flagName);
      continue;
    }
    // getOptionDescription() will return null if the option does not exist, however
    // getOptionValueDescription() above would have thrown an IllegalArgumentException if that
    // were the case.
    Verify.verifyNotNull(flagPolicy.description);
    // Dispatch on the policy's operation type.
    switch (flagPolicy.policy.getOperationCase()) {
      case SET_VALUE:
        applySetValueOperation(parser, flagPolicy, valueDescription, loglevel);
        break;
      case USE_DEFAULT:
        applyUseDefaultOperation(
            parser, "UseDefault", flagPolicy.description.getOptionDefinition(), loglevel);
        break;
      case ALLOW_VALUES:
        AllowValues allowValues = flagPolicy.policy.getAllowValues();
        FilterValueOperation.AllowValueOperation allowValueOperation =
            new FilterValueOperation.AllowValueOperation(loglevel);
        allowValueOperation.apply(
            parser,
            flagPolicy.origin,
            allowValues.getAllowedValuesList(),
            allowValues.hasNewValue() ? allowValues.getNewValue() : null,
            allowValues.hasUseDefault(),
            valueDescription,
            flagPolicy.description);
        break;
      case DISALLOW_VALUES:
        DisallowValues disallowValues = flagPolicy.policy.getDisallowValues();
        FilterValueOperation.DisallowValueOperation disallowValueOperation =
            new FilterValueOperation.DisallowValueOperation(loglevel);
        disallowValueOperation.apply(
            parser,
            flagPolicy.origin,
            disallowValues.getDisallowedValuesList(),
            disallowValues.hasNewValue() ? disallowValues.getNewValue() : null,
            disallowValues.hasUseDefault(),
            valueDescription,
            flagPolicy.description);
        break;
      case OPERATION_NOT_SET:
        throw new PolicyOperationNotSetException(flagName);
      default:
        // Unknown operations (e.g. from a newer policy proto) are warned about, not fatal.
        logger.atWarning().log(
            "Unknown operation '%s' from invocation policy for flag '%s'",
            flagPolicy.policy.getOperationCase(), flagName);
        break;
    }
  }
}
/**
 * Applies the wizard's library selection to the project.
 *
 * <p>For Maven projects the selection is applied directly (libraries not selected are
 * removed). For native projects the selection is saved and the classpath container
 * entry is either computed fresh or an update of the existing entry is requested.
 *
 * @return {@code true} on success; {@code false} if a {@link CoreException} occurred,
 *     in which case an error status is set on this page
 */
@Override
public boolean finish() {
  List<Library> libraries = getSelectedLibraries();
  try {
    if (isMavenProject) {
      // remove any library that wasn't selected
      Set<Library> removed = new HashSet<>(getAvailableLibraries());
      removed.removeAll(libraries);
      // No need for an Analytics ping here; addMavenLibraries will do it.
      BuildPath.updateMavenLibraries(project.getProject(), libraries, removed,
          new NullProgressMonitor());
    } else {
      if (!libraries.isEmpty()) {
        AnalyticsLibraryPingHelper.sendLibrarySelectionPing(
            AnalyticsEvents.NATIVE_PROJECT, libraries);
      }
      /*
       * FIXME: BuildPath.addNativeLibrary() is too heavy-weight here. ClasspathContainerWizard,
       * our wizard, is responsible for installing the classpath entry returned by getSelection(),
       * which will perform the library resolution. We just need to save the selected libraries
       * so that they are resolved later.
       */
      BuildPath.saveLibraryList(project, libraries, new NullProgressMonitor());
      Library masterLibrary =
          BuildPath.collectLibraryFiles(project, libraries, new NullProgressMonitor());
      // skip computeEntry() if we have an existing entry: unnecessary and simplifies testing too
      if (originalEntry == null) {
        newEntry = BuildPath.computeEntry(project, masterLibrary, new NullProgressMonitor());
        Verify.verifyNotNull(newEntry); // new entry should be created
      } else {
        // request update of existing entry
        ClasspathContainerInitializer initializer =
            JavaCore.getClasspathContainerInitializer(
                LibraryClasspathContainer.CONTAINER_PATH_PREFIX);
        // this is always true for our initializer
        if (initializer.canUpdateClasspathContainer(originalEntry.getPath(), project)) {
          // existing entry needs to be updated
          initializer.requestClasspathContainerUpdate(
              originalEntry.getPath(), project, null /*containerSuggestion*/);
        }
      }
    }
    return true;
  } catch (CoreException ex) {
    StatusUtil.setErrorStatus(this, "Error updating container definition", ex); //$NON-NLS-1$
    return false;
  }
}
/**
 * Returns the default value of the rule's "malloc" attribute, verifying it is non-null.
 */
private static Label getDefaultMallocLabel(Rule rule) {
  Object defaultValue =
      rule.getRuleClassObject().getAttributeByName("malloc").getDefaultValueUnchecked();
  return Verify.verifyNotNull((Label) defaultValue);
}
/**
 * Verify options and set defaults.
 *
 * <p>Applies fallbacks ({@code endRef} defaults to {@code "HEAD"}, {@code outputLocation}
 * to the empty string), then checks all mandatory options, loading Twitter, mailing-list
 * and RSS credentials from their properties files when the corresponding direct options
 * were not supplied. Fails with a {@code VerifyException} if anything required is missing.
 *
 * @return new CliOption instance
 */
public CliOptions build() {
    if (endRef == null) {
        endRef = "HEAD";
    }
    if (outputLocation == null) {
        outputLocation = "";
    }
    // Mandatory options — no fallback possible.
    Verify.verifyNotNull(localRepoPath,
        "Path to a local git repository should not be null!");
    Verify.verifyNotNull(remoteRepoPath,
        "Path to a remote github repository should not be null!");
    Verify.verifyNotNull(startRef, "Start reference should not be null!");
    Verify.verifyNotNull(releaseNumber, "Release number should not be null!");
    if (shouldLoadTwitterProperties()) {
        // Twitter credentials were not fully supplied directly; load them from file,
        // then verify each one made it in.
        Verify.verifyNotNull(twitterProperties, "Properties file for Twitter is expected"
            + " if some of the following options are not entered: twitterConsumerKey, "
            + "twitterConsumerSecret, twitterAccessToken, twitterAccessTokenSecret.");
        loadTwitterProperties();
        Verify.verifyNotNull(twitterConsumerKey, "Consumer key for Twitter is expected!");
        Verify.verifyNotNull(twitterConsumerSecret,
            "Consumer secret for Twitter is expected!");
        Verify.verifyNotNull(twitterAccessToken, "Access token for Twitter is expected!");
        Verify.verifyNotNull(twitterAccessTokenSecret,
            "Access token secret for Twitter is expected!");
    }
    if (shouldLoadMlistProperties()) {
        // Same pattern for mailing-list credentials.
        Verify.verifyNotNull(mlistProperties, "Properties file for mailing list is "
            + "expected if some of the following options are not entered: mlistUsername, "
            + "mlistPassword.");
        loadMlistProperties();
        Verify.verifyNotNull(mlistUsername, "Username for mailing list is expected!");
        Verify.verifyNotNull(mlistPassword, "Password for mailing list is expected!");
    }
    if ((publishAllSocial || publishSfRss) && sfRssBearerToken == null) {
        // RSS publishing requested but no bearer token supplied; load from properties.
        Verify.verifyNotNull(sfRssProperties, "Properties file for RSS is expected"
            + " if some of the following options are not entered: sfRssBearerToken.");
        loadSfRssProperties();
        Verify.verifyNotNull(sfRssBearerToken, "sfRssBearerToken for RSS is expected!");
    }
    return getNewCliOptionsInstance();
}
/**
 * Returns the column number for the given character offset, as reported by the
 * compilation unit's line map.
 *
 * <p>Requires {@code unit} to have been set beforehand.
 */
@Override
public int getColumnNumber(int inputPosition) {
  Verify.verifyNotNull(unit, "Expected compilation unit to be set.");
  return unit.getLineMap().getColumnNumber(inputPosition);
}