The following examples show how to use java.util.function.UnaryOperator#identity(). They are collected from various open source projects; the original source files are available on GitHub.
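As a quick, standalone reminder of the basic contract before the project snippets: UnaryOperator.identity() returns an operator whose apply() hands its argument back unchanged, which makes it a convenient no-op default wherever a transformation step is optional. The example below is a minimal sketch and is not taken from any of the projects listed here.

import java.util.List;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class IdentityDemo {
    public static void main(String[] args) {
        // identity() returns a UnaryOperator whose apply() returns its input unchanged
        UnaryOperator<String> op = UnaryOperator.identity();
        System.out.println(op.apply("hello"));   // prints "hello"

        // Typical use: a harmless default where a mapping step is optional
        List<String> unchanged = Stream.of("a", "b", "c")
                .map(UnaryOperator.<String>identity())
                .collect(Collectors.toList());
        System.out.println(unchanged);           // prints [a, b, c]
    }
}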
@Test
void eliminatesUselessLoans() {
final Loan alreadyInvested = new MockLoanBuilder()
.set(LoanImpl::setRating, Rating.B)
.set(LoanImpl::setRemainingInvestment, Money.from(1))
.set(LoanImpl::setReservedAmount, Money.from(0))
.set(LoanImpl::setMyInvestment, mockMyInvestment())
.build();
final Loan normal = new MockLoanBuilder()
.set(LoanImpl::setRating, Rating.A)
.set(LoanImpl::setRemainingInvestment, Money.from(1))
.set(LoanImpl::setReservedAmount, Money.from(0))
.build();
final Zonky zonky = harmlessZonky();
when(zonky.getAvailableLoans(any())).thenReturn(Stream.of(alreadyInvested, normal));
final Tenant tenant = mockTenant(zonky);
final AbstractMarketplaceAccessor<LoanDescriptor> d = new PrimaryMarketplaceAccessor(tenant,
UnaryOperator.identity());
final Collection<LoanDescriptor> ld = d.getMarketplace();
assertThat(ld).hasSize(1)
.element(0)
.extracting(LoanDescriptor::item)
.isSameAs(normal);
}
@Test
void readsMarketplace() {
final Loan l = new MockLoanBuilder().build();
int loanId = l.getId();
final Participation p = mock(ParticipationImpl.class);
when(p.getId()).thenReturn(1L);
when(p.getLoanId()).thenReturn(loanId);
when(p.getLoanHealthInfo()).thenReturn(LoanHealth.HEALTHY);
final Zonky zonky = harmlessZonky();
when(zonky.getLoan(eq(loanId))).thenReturn(l);
when(zonky.getAvailableParticipations(any())).thenReturn(Stream.of(p));
final PowerTenant tenant = mockTenant(zonky);
final AbstractMarketplaceAccessor<ParticipationDescriptor> d = new SecondaryMarketplaceAccessor(tenant,
UnaryOperator.identity());
final Collection<ParticipationDescriptor> pd = d.getMarketplace();
assertThat(pd).hasSize(1)
.element(0)
.extracting(ParticipationDescriptor::item)
.isSameAs(p);
assertThat(pd)
.element(0)
.extracting(ParticipationDescriptor::related)
.isSameAs(l);
}
private Bucket collect(RoutedCollectPhase collectNode) throws Throwable {
JobSetup jobSetup = internalCluster().getDataNodeInstance(JobSetup.class);
TasksService tasksService = internalCluster().getDataNodeInstance(TasksService.class);
SharedShardContexts sharedShardContexts = new SharedShardContexts(
internalCluster().getDataNodeInstance(IndicesService.class), UnaryOperator.identity());
RootTask.Builder builder = tasksService.newBuilder(collectNode.jobId());
NodeOperation nodeOperation = NodeOperation.withDirectResponse(collectNode, mock(ExecutionPhase.class), (byte) 0,
"remoteNode");
List<CompletableFuture<StreamBucket>> results = jobSetup.prepareOnRemote(
DUMMY_SESSION_INFO,
ImmutableList.of(nodeOperation),
builder,
sharedShardContexts
);
RootTask rootTask = tasksService.createTask(builder);
rootTask.start();
return results.get(0).get(2, TimeUnit.SECONDS);
}
@Test
public void testGetIndexServiceForInvalidReaderId() throws Exception {
final FetchTask context = new FetchTask(
UUID.randomUUID(),
new FetchPhase(
1,
null,
new TreeMap<>(),
HashMultimap.create(),
ImmutableList.of()),
"dummy",
new SharedShardContexts(mock(IndicesService.class), UnaryOperator.identity()),
clusterService.state().getMetaData(),
relationName -> null,
Collections.emptyList());
expectedException.expect(IllegalArgumentException.class);
context.indexService(10);
}
@Override
public SupervisedFilterFactory<DI> create(Parameters params,
IResourceRepository repo) {
return new SupervisedFilterFactory<DI>(
params.contains("ignoreFilter")
? repo.get(params.get("ignoreFilter"))
: PredicateUtils.alwaysTrue(),
PredicateUtils.alwaysTrue(),
params.contains("constantPreprocessor")
? repo.get(params.get("constantPreprocessor"))
: UnaryOperator.identity());
}
public static UnaryOperator<Optional<XFileKey>> create(PropertyDataProtection mode) {
switch (mode) {
case AUTO:
return UnaryOperator.identity();
case CBC:
return create(DPCipherFactories.AES_CBC);
case XTS:
return create(DPCipherFactories.AES_XTS);
case OFF:
return key -> Optional.empty();
default:
throw new UnsupportedOperationException("unknown mode: " + mode);
}
}
private SharedShardContexts maybeInstrumentProfiler(RootTask.Builder builder) {
if (enableProfiling) {
QueryProfiler queryProfiler = new QueryProfiler();
ProfilingContext profilingContext = new ProfilingContext(queryProfiler::getTree);
builder.profilingContext(profilingContext);
return new SharedShardContexts(
indicesService,
indexSearcher -> new InstrumentedIndexSearcher(indexSearcher.getIndexReader(), queryProfiler)
);
} else {
return new SharedShardContexts(indicesService, UnaryOperator.identity());
}
}
private SharedShardContexts maybeInstrumentProfiler(boolean enableProfiling, RootTask.Builder contextBuilder) {
if (enableProfiling) {
QueryProfiler queryProfiler = new QueryProfiler();
ProfilingContext profilingContext = new ProfilingContext(queryProfiler::getTree);
contextBuilder.profilingContext(profilingContext);
return new SharedShardContexts(
indicesService,
indexSearcher -> new InstrumentedIndexSearcher(indexSearcher.getIndexReader(), queryProfiler)
);
} else {
return new SharedShardContexts(indicesService, UnaryOperator.identity());
}
}
@Test
public void testSearcherIsAcquiredForShard() throws Exception {
IntArrayList shards = IntArrayList.from(1, 2);
Routing routing = new Routing(Map.of("dummy", Map.of("i1", shards)));
IndexBaseBuilder ibb = new IndexBaseBuilder();
ibb.allocate("i1", shards);
HashMultimap<RelationName, String> tableIndices = HashMultimap.create();
tableIndices.put(new RelationName(Schemas.DOC_SCHEMA_NAME, "i1"), "i1");
MetaData metaData = MetaData.builder()
.put(IndexMetaData.builder("i1")
.settings(Settings.builder()
.put(SETTING_NUMBER_OF_SHARDS, 1)
.put(SETTING_NUMBER_OF_REPLICAS, 0)
.put(SETTING_VERSION_CREATED, Version.CURRENT))
.build(), true)
.build();
final FetchTask context = new FetchTask(
UUID.randomUUID(),
new FetchPhase(
1,
null,
ibb.build(),
tableIndices,
ImmutableList.of(createReference("i1", new ColumnIdent("x"), DataTypes.STRING))),
"dummy",
new SharedShardContexts(mock(IndicesService.class, RETURNS_MOCKS), UnaryOperator.identity()),
metaData,
relationName -> null,
ImmutableList.of(routing));
context.prepare();
assertThat(context.searcher(1), Matchers.notNullValue());
assertThat(context.searcher(2), Matchers.notNullValue());
}
public TokenParser(String openToken, String closeToken) {
this(openToken, closeToken, UnaryOperator.identity(), UnaryOperator.identity());
}
public TokenParser(String openToken, String closeToken, UnaryOperator<String> tokenHandler) {
this(openToken, closeToken, UnaryOperator.identity(), tokenHandler);
}
public SupervisedFilterFactory(
final Predicate<LogicalConstant> constantFilter) {
this(constantFilter, PredicateUtils.alwaysTrue(),
UnaryOperator.identity());
}
public Arg(Property property, Option option) {
this(property, option, UnaryOperator.identity());
}
public SqlValidatorTester(SqlTestFactory factory) {
this(factory, UnaryOperator.identity());
}
/**
* Creates a new gRPC interceptor that will collect metrics into the given {@link MeterRegistry}. This constructor won't
* use any customizers and will only initialize the {@link Code#OK OK} status.
*
* @param registry The registry to use.
*/
public AbstractMetricCollectingInterceptor(final MeterRegistry registry) {
this(registry, UnaryOperator.identity(), UnaryOperator.identity(), Code.OK);
}
/**
* Returns a remapping function for the provided particle type.<br>
* The returned remapping function is never null.<br>
* The remapping function itself, however, may return a new particle or null (which means the particle should be skipped).
* @param particle particle type
* @return remapping function
*/
public Function<Particle, Particle> getRemap(Class<? extends Particle> particle) {
Function<Particle, Particle> function = table.get(particle);
return function != null ? function : UnaryOperator.identity();
}
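A hypothetical call site could consume the returned function as sketched below; remapper, particle, and emit are illustrative names that do not appear in the snippet above.

// Illustrative usage of the null-means-skip contract described in the javadoc (names are hypothetical).
Function<Particle, Particle> remap = remapper.getRemap(particle.getClass());  // never null
Particle remapped = remap.apply(particle);
if (remapped != null) {
    emit(remapped);   // hypothetical downstream handling
}
// a null result means the particle is skipped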
/**
* Provides a function to modify global custom meta data on startup.
* <p>
* Plugins should return the input custom map via {@link UnaryOperator#identity()} if no upgrade is required.
* <p>
* The order of custom meta data upgrader calls is undefined and can change between runs, so it is expected that
* plugins will modify only data owned by them to avoid conflicts.
* <p>
* @return Never {@code null}. The same or upgraded {@code MetaData.Custom} map.
* @throws IllegalStateException if the node should not start because at least one {@code MetaData.Custom}
* is unsupported
*/
public UnaryOperator<Map<String, MetaData.Custom>> getCustomMetaDataUpgrader() {
return UnaryOperator.identity();
}
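By way of illustration, a plugin that does need an upgrade might override this method as sketched below; the "my-plugin-custom" key and the upgradeOwnCustom helper are hypothetical. The template upgrader in the next snippet follows the same pattern.

// Hypothetical override: upgrade only the custom entry this plugin owns, leave everything else untouched.
@Override
public UnaryOperator<Map<String, MetaData.Custom>> getCustomMetaDataUpgrader() {
    return customs -> {
        MetaData.Custom own = customs.get("my-plugin-custom");   // hypothetical key
        if (own == null) {
            return customs;                                      // nothing of ours: same effect as identity()
        }
        Map<String, MetaData.Custom> upgraded = new HashMap<>(customs);
        upgraded.put("my-plugin-custom", upgradeOwnCustom(own)); // hypothetical helper
        return upgraded;
    };
}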
/**
* Provides a function to modify index template meta data on startup.
* <p>
* Plugins should return the input template map via {@link UnaryOperator#identity()} if no upgrade is required.
* <p>
* The order of the template upgrader calls is undefined and can change between runs, so it is expected that
* plugins will modify only templates owned by them to avoid conflicts.
* <p>
* @return Never {@code null}. The same or upgraded {@code IndexTemplateMetaData} map.
* @throws IllegalStateException if the node should not start because at least one {@code IndexTemplateMetaData}
* cannot be upgraded
*/
public UnaryOperator<Map<String, IndexTemplateMetaData>> getIndexTemplateMetaDataUpgrader() {
return UnaryOperator.identity();
}
/**
* Provides a function to modify index meta data when an index is introduced into the cluster state for the first time.
* <p>
* Plugins should return the input index metadata via {@link UnaryOperator#identity()} if no upgrade is required.
* <p>
* The order of the index upgrader calls for the same index is undefined and can change between runs, so it is expected that
* plugins will modify only indices owned by them to avoid conflicts.
* <p>
* @return Never {@code null}. The same or upgraded {@code IndexMetaData}.
* @throws IllegalStateException if the node should not start because the index is unsupported
*/
public UnaryOperator<IndexMetaData> getIndexMetaDataUpgrader() {
return UnaryOperator.identity();
}
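Analogously, a hypothetical override that rewrites only indices owned by the plugin could look like the sketch below; the "myplugin-" prefix and the marker setting are assumptions made for the illustration.

// Hypothetical override: rebuild only this plugin's indices, return every other index as-is.
@Override
public UnaryOperator<IndexMetaData> getIndexMetaDataUpgrader() {
    return indexMetaData -> {
        if (!indexMetaData.getIndex().getName().startsWith("myplugin-")) {   // hypothetical ownership check
            return indexMetaData;                       // not ours: same effect as identity()
        }
        return IndexMetaData.builder(indexMetaData)
            .settings(Settings.builder()
                .put(indexMetaData.getSettings())
                .put("index.myplugin.upgraded", true))  // hypothetical marker setting
            .build();
    };
}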