Below are example usages of com.google.common.base.Optional#get(), collected from various open-source projects.
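For context, here is a minimal standalone sketch (not taken from the examples below; the class name OptionalGetExample is hypothetical) showing how Guava's Optional#get() behaves for present and absent values:

import com.google.common.base.Optional;

public class OptionalGetExample {
    public static void main(String[] args) {
        // Optional.of wraps a non-null value; get() returns it directly.
        Optional<String> present = Optional.of("value");
        System.out.println(present.get()); // prints "value"

        // Optional.absent() holds no value; calling get() on it throws
        // IllegalStateException, so guard with isPresent() first.
        Optional<String> absent = Optional.absent();
        if (absent.isPresent()) {
            System.out.println(absent.get());
        } else {
            System.out.println(absent.or("fallback")); // prints "fallback"
        }
    }
}

The examples that follow all apply the same pattern: check isPresent() (directly or via a precondition) before calling get().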
@Override
public PrecomputedJoinStorage get() {
    // Ensure a configuration has been set.
    final Configuration config = configSupplier.get();
    checkNotNull(config, "Could not build the PrecomputedJoinStorage until the PrecomputedJoinIndexer has been configured.");
    final PrecomputedJoinIndexerConfig indexerConfig = new PrecomputedJoinIndexerConfig(config);
    // Ensure the storage type has been set.
    final Optional<PrecomputedJoinStorageType> storageType = indexerConfig.getPcjStorageType();
    checkArgument(storageType.isPresent(), "The '" + PrecomputedJoinIndexerConfig.PCJ_STORAGE_TYPE +
            "' property must have one of the following values: " + Arrays.toString(PrecomputedJoinStorageType.values()));
    // Create and return the configured storage.
    switch (storageType.get()) {
        case ACCUMULO:
            return accumuloSupplier.get();
        default:
            throw new IllegalArgumentException("Unsupported PrecomputedJoinStorageType: " + storageType.get());
    }
}
private void initBuilderParamsIfApplicable() {
    if (!protoclass().environment().hasBuilderModule()) {
        return;
    }
    isBuilderParameter = FParameterMirror.isPresent(element);
    Optional<SwitchMirror> switcher = SwitchMirror.find(element);
    if (switcher.isPresent()) {
        if (isBuilderParameter) {
            report().annotationNamed(FParameterMirror.simpleName())
                    .error("@%s and @%s annotations cannot be used on the same factory parameter",
                            FParameterMirror.simpleName(),
                            SwitchMirror.simpleName());
            isBuilderParameter = false;
        }
        if (!isEnumType()) {
            report().annotationNamed(SwitchMirror.simpleName())
                    .error("@%s annotation is applicable only to enum parameters", SwitchMirror.simpleName());
        } else {
            builderSwitcherModel = new SwitcherModel(switcher.get(), names, containedTypeElement);
        }
    }
}
@Override
public boolean hasNext() {
    while (this.currentIterator == null || !this.currentIterator.hasNext()) {
        if (this.currentIterator != null) {
            this.currentRequirement.add(this.currentIterator.totalResourcesUsed());
        }
        if (this.resourcePool.exceedsSoftBound(this.currentRequirement, true)) {
            return false;
        }
        Optional<SingleTierIterator> tmp = this.singleTierIterator.nextTier();
        if (!tmp.isPresent()) {
            return false;
        }
        this.singleTierIterator = tmp.get();
        ResourcePool contractedPool = this.resourcePool.contractPool(this.currentRequirement);
        this.currentIterator = HierarchicalAllocator.this.underlying.allocateRequests(this.singleTierIterator, contractedPool);
    }
    return true;
}
@Override
public InstallConfiguration promptInstallConfiguration(final String instanceName) throws IOException {
    final Optional<StorageType> storageType = sharedShellState.getShellState().getStorageType();
    checkState(storageType.isPresent(), "The shell must be connected to a storage to use the install prompt.");
    switch (storageType.get()) {
        case ACCUMULO:
            return promptAccumuloConfig(instanceName);
        case MONGO:
            return promptMongoConfig(instanceName);
        default:
            throw new IllegalStateException("Unsupported storage type: " + storageType.get());
    }
}
@Override
public PrecomputedJoinUpdater get() {
    // Ensure a configuration has been set.
    final Configuration config = configSupplier.get();
    checkNotNull(config, "Cannot build the PrecomputedJoinUpdater until the PrecomputedJoinIndexer has been configured.");
    final PrecomputedJoinIndexerConfig indexerConfig = new PrecomputedJoinIndexerConfig(config);
    // Ensure an updater type has been set.
    final Optional<PrecomputedJoinUpdaterType> updaterType = indexerConfig.getPcjUpdaterType();
    checkArgument(updaterType.isPresent(), "The '" + PrecomputedJoinIndexerConfig.PCJ_UPDATER_TYPE +
            "' property must have one of the following values: " + Arrays.toString(PrecomputedJoinUpdaterType.values()));
    // Create and return the configured updater.
    switch (updaterType.get()) {
        case FLUO:
            return fluoSupplier.get();
        default:
            throw new IllegalArgumentException("Unsupported PrecomputedJoinUpdaterType: " + updaterType.get());
    }
}
private static DatePartitionType getGranularity(State state, int numBranches, int branchId) {
    String propName = ForkOperatorUtils.getPropertyNameForBranch(WRITER_PARTITION_GRANULARITY, numBranches, branchId);
    String granularityValue = state.getProp(propName, DEFAULT_WRITER_PARTITION_GRANULARITY.toString());
    Optional<DatePartitionType> granularity =
            Enums.getIfPresent(DatePartitionType.class, granularityValue.toUpperCase());
    Preconditions.checkState(granularity.isPresent(),
            granularityValue + " is not a valid writer partition granularity");
    return granularity.get();
}
private Checksum calculateChecksum(Optional<Checksum> originalChecksum, Path path)
        throws IOException, InterruptedException {
    if (originalChecksum.isPresent()) {
        // The checksum is checked on download, so if we got here, the user-provided checksum is good.
        return originalChecksum.get();
    }
    return Checksum.fromString(KeyType.SHA256, RepositoryCache.getChecksum(KeyType.SHA256, path));
}
public static String getAddPartitionQuery(String tableName, String partitionSpec, Optional<String> fileFormat, Optional<String> location) {
    String query = "ALTER TABLE " + tableName + " ADD IF NOT EXISTS" + " PARTITION (" + partitionSpec + ")";
    if (fileFormat.isPresent()) {
        query = query + " FILEFORMAT " + fileFormat.get();
    }
    if (location.isPresent()) {
        query = query + " LOCATION " + PartitionUtils.getQuotedString(location.get());
    }
    return query;
}
private IEObjectDescription getCorrectCandidateViaScope(Optional<IScope> scopeForCollisionCheck) {
    try (Measurement m = contentAssistDataCollectors.dcDetectProposalConflicts().getMeasurement()) {
        if (scopeForCollisionCheck.isPresent()) {
            IScope scope = scopeForCollisionCheck.get();
            IEObjectDescription candidateViaScope = getCandidateViaScope(scope);
            candidateViaScope = specialcaseNamespaceShadowsOwnElement(scope, candidateViaScope);
            return candidateViaScope;
        }
        return null;
    }
}
@Override
public List<SimpleCommandResult> toResult(String sql, SqlNode sqlNode) throws Exception {
    final SqlLoadMaterialization load = SqlNodeUtil.unwrap(sqlNode, SqlLoadMaterialization.class);
    if (!SystemUser.SYSTEM_USERNAME.equals(context.getQueryUserName())) {
        throw SqlExceptionHelper.parseError("$LOAD MATERIALIZATION not supported.", sql, load.getParserPosition()).build(logger);
    }
    final ReflectionService service = Preconditions.checkNotNull(context.getAccelerationManager().unwrap(ReflectionService.class),
            "Couldn't unwrap ReflectionService");
    final List<String> components = normalizeComponents(load.getMaterializationPath());
    if (components == null) {
        throw SqlExceptionHelper.parseError("Invalid materialization path.", sql, load.getParserPosition()).build(logger);
    }
    final ReflectionId reflectionId = new ReflectionId(components.get(0));
    final Optional<ReflectionGoal> goalOptional = service.getGoal(reflectionId);
    if (!goalOptional.isPresent()) {
        throw SqlExceptionHelper.parseError("Unknown reflection id.", sql, load.getParserPosition()).build(logger);
    }
    final ReflectionGoal goal = goalOptional.get();
    final MaterializationId materializationId = new MaterializationId(components.get(1));
    final Optional<Materialization> materializationOpt = service.getMaterialization(materializationId);
    if (!materializationOpt.isPresent()) {
        throw SqlExceptionHelper.parseError("Unknown materialization id.", sql, load.getParserPosition()).build(logger);
    }
    final Materialization materialization = materializationOpt.get();
    // if the user already made changes to the reflection goal, let's stop right here
    Preconditions.checkState(ReflectionGoalsStore.checkGoalVersion(goal, materialization.getReflectionGoalVersion()),
            "materialization no longer matches its goal");
    refreshMetadata(goal, materialization);
    return Collections.singletonList(SimpleCommandResult.successful("Materialization metadata loaded."));
}
/**
 * @param hcatURI User-specified hcat URI.
 * @param broker A shared resource broker.
 * @return a {@link HiveConf} with the specified hcatURI, if any.
 * @throws IOException if the shared {@link HiveConf} resource is not configured in the broker.
 */
public static <S extends ScopeType<S>> HiveConf get(Optional<String> hcatURI, SharedResourcesBroker<S> broker)
        throws IOException {
    try {
        SharedHiveConfKey confKey =
                hcatURI.isPresent() && StringUtils.isNotBlank(hcatURI.get()) ? new SharedHiveConfKey(hcatURI.get())
                        : SharedHiveConfKey.INSTANCE;
        return broker.getSharedResource(new HiveConfFactory<>(), confKey);
    } catch (NotConfiguredException nce) {
        throw new IOException(nce);
    }
}
protected Optional<AllocVal> isAllocationNode(Statement s, Val fact) {
    Optional<Stmt> optUnit = s.getUnit();
    if (optUnit.isPresent()) {
        Stmt stmt = optUnit.get();
        return options.getAllocationVal(s.getMethod(), stmt, fact, icfg());
    }
    return Optional.absent();
}
@Override
public IItemStack getItemInHand() {
    Optional<ItemStack> itemInHand = player.getItemInHand();
    if (itemInHand.isPresent()) {
        return new SpongeItemStack(itemInHand.get());
    } else {
        return null;
    }
}
@Override
protected void processLine(String[] nextLine, Map<String, PlaceLocation> result, int lineNumber)
{
    String zipCode = trim(nextLine[0]);
    String latitude = trim(nextLine[1]);
    String longitude = trim(nextLine[2]);
    String primaryCity = trim(nextLine[3]);
    String state = trim(nextLine[4]);
    String county = trim(nextLine[5]);
    String timeZoneId = trim(nextLine[6]);
    if (result.containsKey(zipCode))
    {
        throw new IllegalStateException(
                format("Duplicate zipcode in record {zipcode:[%s] latitude:[%s] longitude:[%s] tzid:[%s]} on line %d",
                        zipCode, latitude, longitude, timeZoneId, lineNumber));
    }
    if (isAnyEmpty(zipCode, latitude, longitude, timeZoneId))
    {
        throw new IllegalStateException(
                format("Empty field(s) in record {zipcode:[%s] latitude:[%s] longitude:[%s] tzid:[%s]} on line %d",
                        zipCode, latitude, longitude, timeZoneId, lineNumber));
    }
    Optional<TimeZone> timeZone = TimeZones.getTimeZoneById(timeZoneId);
    if (!timeZone.isPresent())
    {
        throw new IllegalStateException(
                format("Unknown tzid in record {zipcode:[%s] latitude:[%s] longitude:[%s] tzid:[%s]} on line %d",
                        zipCode, latitude, longitude, timeZoneId, lineNumber));
    }
    PlaceLocation placeLocation = new PlaceLocation(zipCode, GeoLocation.fromCoordinates(latitude, longitude),
            primaryCity, state, county, timeZone.get());
    placeLocation.setGeoPrecision(Place.GEOPRECISION_ZIP5);
    result.put(zipCode, placeLocation);
}
@Override
public PersistentAccount call(Optional<PersistentAccount> subInput) {
    if (!subInput.isPresent()) {
        JsonObject jsonObject = new JsonObject()
                .put("message", format("Account was modified concurrently. Please retry the request."));
        throw new HttpRequestValidationException(HTTP_CONFLICT, jsonObject);
    }
    return subInput.get();
}
@Programmatic
public PhoneOrFaxNumber findByPhoneOrFaxNumber(
        final CommunicationChannelOwner owner,
        final String phoneNumber) {
    final Optional<PhoneOrFaxNumber> phoneNumberIfFound = findByPhoneOrFaxNumber(owner, phoneNumber, CommunicationChannelType.PHONE_NUMBER);
    if (phoneNumberIfFound.isPresent()) {
        return phoneNumberIfFound.get();
    }
    final Optional<PhoneOrFaxNumber> faxNumberIfFound = findByPhoneOrFaxNumber(owner, phoneNumber, CommunicationChannelType.FAX_NUMBER);
    return faxNumberIfFound.orNull();
}
private Optional<ApiParam> readApiParam(ParameterContext context) {
    Optional<ApiMethod> optional = context.getOperationContext().findAnnotation(ApiMethod.class);
    if (optional.isPresent()) {
        ApiMethod apiMethod = optional.get();
        ResolvedMethodParameter parameter = context.resolvedMethodParameter();
        if (parameter.getParameterIndex() > apiMethod.params().length - 1) {
            throw new SwaggerMoreException("The number of parameters in method " + context.getOperationContext().getName() + " does not match the number of @ApiParam.");
        }
        return Optional.of(apiMethod.params()[parameter.getParameterIndex()]);
    }
    return Optional.absent();
}
private DynamicDistributedLogConfiguration getDynConf(String streamName) {
    Optional<DynamicDistributedLogConfiguration> dynDlConf =
            streamConfigProvider.getDynamicStreamConfig(streamName);
    if (dynDlConf.isPresent()) {
        return dynDlConf.get();
    } else {
        return ConfUtils.getConstDynConf(dlConfig);
    }
}
public CachedServerIcon getFavicon(FaviconSource source) {
    Optional<CachedServerIcon> result = faviconCache.getUnchecked(source);
    return result.isPresent() ? result.get() : null;
}
/**
 * Creates the RepoRelativePath from a given resource. Returns null if the resource is not contained in a repository.
 * If a repository is found, the simple name of the origin is used.
 */
public static RepoRelativePath compute(FileURI uriOfResource, IN4JSCore n4jsCore) {
    Optional<? extends IN4JSProject> optProj = n4jsCore.findProject(uriOfResource.toURI());
    if (!optProj.isPresent()) {
        return null;
    }
    IN4JSProject project = optProj.get();
    Path pathOfResource = uriOfResource.toFileSystemPath();
    Path pathOfProject = project.getLocation().toFileSystemPath();
    String fileOfResourceInsideProject = pathOfProject.relativize(pathOfResource).toString();
    // strip anchor part if present, i.e. path to type within the resource
    int anchorIndex = fileOfResourceInsideProject.indexOf("#");
    if (anchorIndex >= 0)
        fileOfResourceInsideProject = fileOfResourceInsideProject.substring(0, anchorIndex);
    File absolutePathOfResource = pathOfProject.toAbsolutePath().resolve(fileOfResourceInsideProject).toFile();
    if (!absolutePathOfResource.exists()) {
        return null;
    }
    // note: for retrieving the repo-relative folder name, we must not rely on single path segments as they
    // may appear more than once. E.g. "n4js" may be the folder of the oomph installation, the simple name of the
    // repository folder, and a folder representing the package n4js.
    File repoFolder = getRepoFolder(absolutePathOfResource);
    // for resolving the repo-relative path,
    // we only care about the repo folder name, since the folder may be named differently
    String pathOfProjectInRepo = getRepoPath(repoFolder, pathOfProject.getParent().toFile());
    if (pathOfProjectInRepo == null) {
        return null;
    }
    String pathOfResourceInProject = '/' + fileOfResourceInsideProject;
    // ensure forward slashes
    if (File.separatorChar != '/') {
        pathOfResourceInProject = pathOfResourceInProject.replace(File.separatorChar, '/');
        pathOfProjectInRepo = pathOfProjectInRepo.replace(File.separatorChar, '/');
    }
    N4JSProjectName projName = project.getProjectName();
    String repoName = getRepoName(repoFolder);
    return new RepoRelativePath(repoName, pathOfProjectInRepo, // repo relative
            projName, pathOfResourceInProject, // project relative
            -1);
}