The following examples show how to use the com.google.common.cache.LoadingCache API, with links to the source code on GitHub.
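As a baseline, every snippet below builds on the same pattern: a CacheBuilder configured with eviction policies plus a CacheLoader that computes missing values. A minimal self-contained sketch (the key/value types and sizes here are illustrative, not from any snippet):

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.concurrent.TimeUnit;

public class LoadingCacheExample {
  public static void main(String[] args) {
    // Build a small cache whose values are computed on first access.
    LoadingCache<String, Integer> lengths = CacheBuilder.newBuilder()
        .maximumSize(100)                       // size-based eviction
        .expireAfterWrite(10, TimeUnit.MINUTES) // time-based eviction
        .build(new CacheLoader<String, Integer>() {
          @Override
          public Integer load(String key) {
            return key.length(); // computed once, then served from cache
          }
        });

    System.out.println(lengths.getUnchecked("hello")); // miss: loads and prints 5
    System.out.println(lengths.getUnchecked("hello")); // hit: served from cache
  }
}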
/**
 * Write records to potentially different schemas and tables using EL expressions, and handle errors.
 * @param batch batch of SDC records
 * @param schemaTableClassifier classifier to group records according to the schema and table names,
 *     resolving the EL expressions involved
 * @param recordWriters JDBC record writer cache
 * @param errorRecordHandler error record handler
 * @param perRecord indicates per-record or per-batch update
 * @param tableCreator handler which creates the table if it does not exist yet
 * @throws StageException
 */
public void write(
    Batch batch,
    SchemaTableClassifier schemaTableClassifier,
    LoadingCache<SchemaAndTable, JdbcRecordWriter> recordWriters,
    ErrorRecordHandler errorRecordHandler,
    boolean perRecord,
    JdbcTableCreator tableCreator
) throws StageException {
  Multimap<SchemaAndTable, Record> partitions = schemaTableClassifier.classify(batch);
  for (SchemaAndTable key : partitions.keySet()) {
    tableCreator.create(key.getSchemaName(), key.getTableName());
    Iterator<Record> recordIterator = partitions.get(key).iterator();
    write(recordIterator, key, recordWriters, errorRecordHandler, perRecord);
  }
}
@BeforeMethod
public void beforeMethod() throws Exception {
  this.base = DAOTestBase.getInstance();

  LoadingCache<String, DatasetConfigDTO> mockDatasetConfigCache = Mockito.mock(LoadingCache.class);
  Mockito.when(mockDatasetConfigCache.get(COLLECTION)).thenReturn(new DatasetConfigDTO());

  LoadingCache<MetricDataset, MetricConfigDTO> mockMetricConfigCache = Mockito.mock(LoadingCache.class);
  Mockito.when(mockMetricConfigCache.get(METRIC)).thenReturn(new MetricConfigDTO());

  ThirdEyeCacheRegistry.getInstance().registerDatasetConfigCache(mockDatasetConfigCache);
  ThirdEyeCacheRegistry.getInstance().registerMetricConfigCache(mockMetricConfigCache);

  MetricConfigDTO metricConfigDTO = new MetricConfigDTO();
  metricConfigDTO.setDataset(COLLECTION);
  metricConfigDTO.setName(METRIC.getMetricName());
  metricConfigDTO.setAlias(METRIC.getDataset() + "::" + METRIC.getMetricName());
  this.metricId = DAORegistry.getInstance().getMetricConfigDAO().save(metricConfigDTO);
}
private static FileChunk searchFileChunk(String queryId,
                                         String ebSeqId,
                                         Path outDir,
                                         String startKey,
                                         String endKey,
                                         boolean last,
                                         LoadingCache<IndexCacheKey, BSTIndexReader> indexReaderCache,
                                         int lowCacheHitCheckThreshold) throws IOException, ExecutionException {
  final SearchResult result = searchCorrespondPart(queryId, ebSeqId, outDir, startKey, endKey, last,
      indexReaderCache, lowCacheHitCheckThreshold);
  if (result != null) {
    long startOffset = result.startOffset;
    long endOffset = result.endOffset;
    FileChunk chunk = new FileChunk(result.data, startOffset, endOffset - startOffset);
    if (LOG.isDebugEnabled()) LOG.debug("Retrieve File Chunk: " + chunk);
    return chunk;
  } else {
    return null;
  }
}
/**
 * Creates a matcher that will match methods of an interface, optionally excluding inherited
 * methods.
 *
 * @param matchInterface The interface to match.
 * @param declaredMethodsOnly if {@code true} only methods directly declared in the interface
 *     will be matched, otherwise all methods on the interface are matched.
 * @return A new matcher instance.
 */
public static Matcher<Method> interfaceMatcher(
    Class<?> matchInterface,
    boolean declaredMethodsOnly) {
  Method[] methods =
      declaredMethodsOnly ? matchInterface.getDeclaredMethods() : matchInterface.getMethods();
  final Set<Pair<String, Class<?>[]>> interfaceMethods =
      ImmutableSet.copyOf(Iterables.transform(ImmutableList.copyOf(methods), CANONICALIZE));
  final LoadingCache<Method, Pair<String, Class<?>[]>> cache = CacheBuilder.newBuilder()
      .build(CacheLoader.from(CANONICALIZE));

  return new AbstractMatcher<Method>() {
    @Override
    public boolean matches(Method method) {
      return interfaceMethods.contains(cache.getUnchecked(method));
    }
  };
}
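CacheLoader.from(...) above adapts the plain Guava Function CANONICALIZE into a loader. A minimal self-contained sketch of the same idiom (the LENGTH function is illustrative, not from the snippet):

import com.google.common.base.Function;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

public class FromFunctionExample {
  // Any Guava Function can back a cache via CacheLoader.from(...).
  private static final Function<String, Integer> LENGTH = input -> input.length();

  public static void main(String[] args) {
    LoadingCache<String, Integer> cache =
        CacheBuilder.newBuilder().build(CacheLoader.from(LENGTH));
    System.out.println(cache.getUnchecked("canonicalize")); // prints 12
  }
}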
private LoadingCache<AnomalySlice, Collection<MergedAnomalyResultDTO>> initCache() {
  LOG.info("Initializing anomalies cache");
  return CacheBuilder.newBuilder()
      .expireAfterAccess(10, TimeUnit.MINUTES)
      .maximumSize(10000)
      .build(new CacheLoader<AnomalySlice, Collection<MergedAnomalyResultDTO>>() {
        @Override
        public Collection<MergedAnomalyResultDTO> load(AnomalySlice slice) {
          return loadAnomalies(Collections.singleton(slice)).get(slice);
        }

        @Override
        public Map<AnomalySlice, Collection<MergedAnomalyResultDTO>> loadAll(Iterable<? extends AnomalySlice> slices) {
          return loadAnomalies(Lists.newArrayList(slices));
        }
      });
}
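Overriding loadAll, as the snippet above does, lets getAll(...) fetch all missing keys in one bulk call instead of one load(...) per key. A minimal self-contained sketch of the same idea (the String cache below is illustrative, not ThirdEye's types):

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.ImmutableList;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutionException;

public class BulkLoadExample {
  public static void main(String[] args) throws ExecutionException {
    LoadingCache<String, String> cache = CacheBuilder.newBuilder()
        .build(new CacheLoader<String, String>() {
          @Override
          public String load(String key) {
            return key.toUpperCase(); // fallback for single-key misses
          }

          @Override
          public Map<String, String> loadAll(Iterable<? extends String> keys) {
            // One bulk fetch for all missing keys, e.g. a single query.
            Map<String, String> result = new HashMap<>();
            for (String key : keys) {
              result.put(key, key.toUpperCase());
            }
            return result;
          }
        });

    // getAll(...) returns cached entries and routes every miss through loadAll(...).
    System.out.println(cache.getAll(ImmutableList.of("a", "b", "c")));
  }
}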
@Test
public void whenCacheReachesMaxSize_thenEviction() {
  CacheLoader<String, String> loader = new CacheLoader<String, String>() {
    @Override
    public String load(String key) throws Exception {
      return key.toUpperCase();
    }
  };
  LoadingCache<String, String> cache = CacheBuilder.newBuilder().maximumSize(3).build(loader);

  cache.getUnchecked("one");
  cache.getUnchecked("two");
  cache.getUnchecked("three");
  cache.getUnchecked("four");

  assertEquals(3, cache.size());
  assertNull(cache.getIfPresent("one"));
  assertEquals("FOUR", cache.getIfPresent("four"));
}
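Note that size-based eviction is approximate (Guava may evict an entry shortly before the limit is exceeded), so when tuning maximumSize it helps to watch hit and eviction counts. A minimal sketch using recordStats(), which is off by default; the keys are illustrative:

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.CacheStats;
import com.google.common.cache.LoadingCache;

public class CacheStatsExample {
  public static void main(String[] args) {
    LoadingCache<String, String> cache = CacheBuilder.newBuilder()
        .maximumSize(3)
        .recordStats() // stats collection must be enabled explicitly
        .build(CacheLoader.from((String key) -> key.toUpperCase()));

    cache.getUnchecked("one"); // miss, triggers a load
    cache.getUnchecked("one"); // hit

    CacheStats stats = cache.stats();
    System.out.println("hits=" + stats.hitCount()
        + " misses=" + stats.missCount()
        + " evictions=" + stats.evictionCount());
  }
}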
@Test
public void whenEntryLiveTimeExpire_thenEviction() throws InterruptedException {
  final CacheLoader<String, String> loader = new CacheLoader<String, String>() {
    @Override
    public final String load(final String key) {
      return key.toUpperCase();
    }
  };
  final LoadingCache<String, String> cache =
      CacheBuilder.newBuilder().expireAfterWrite(2, TimeUnit.MILLISECONDS).build(loader);

  cache.getUnchecked("hello");
  assertEquals(1, cache.size());

  Thread.sleep(3);
  cache.getUnchecked("test");

  assertEquals(1, cache.size());
  assertNull(cache.getIfPresent("hello"));
}
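Tests that Thread.sleep past an expiry, like the one above, can flake on a loaded machine. CacheBuilder also accepts a custom Ticker, so time can be advanced deterministically; a minimal sketch with a hand-rolled fake ticker (guava-testlib ships a similar FakeTicker):

import com.google.common.base.Ticker;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;

public class TickerExpiryExample {
  /** Minimal fake ticker; nanos only advance when the test says so. */
  static final class FakeTicker extends Ticker {
    private final AtomicLong nanos = new AtomicLong();

    void advance(long duration, TimeUnit unit) {
      nanos.addAndGet(unit.toNanos(duration));
    }

    @Override
    public long read() {
      return nanos.get();
    }
  }

  public static void main(String[] args) {
    FakeTicker ticker = new FakeTicker();
    LoadingCache<String, String> cache = CacheBuilder.newBuilder()
        .expireAfterWrite(2, TimeUnit.MILLISECONDS)
        .ticker(ticker) // the cache reads time from our ticker, not the system clock
        .build(CacheLoader.from((String key) -> key.toUpperCase()));

    cache.getUnchecked("hello");
    ticker.advance(3, TimeUnit.MILLISECONDS);        // no Thread.sleep needed
    System.out.println(cache.getIfPresent("hello")); // prints null: entry expired
  }
}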
private LoadingCache<Key, SchemaVersionInfo> createLoadingCache(SchemaVersionRetriever schemaRetriever,
                                                                int schemaCacheSize,
                                                                long schemaCacheExpiryInMilliSecs) {
  return CacheBuilder.newBuilder()
      .maximumSize(schemaCacheSize)
      .expireAfterAccess(schemaCacheExpiryInMilliSecs, TimeUnit.MILLISECONDS)
      .build(new CacheLoader<Key, SchemaVersionInfo>() {
        @Override
        public SchemaVersionInfo load(Key key) throws Exception {
          LOG.info("Loading entry for cache with key [{}] from target service", key);
          SchemaVersionInfo schemaVersionInfo;
          if (key.schemaVersionKey != null) {
            schemaVersionInfo = schemaRetriever.retrieveSchemaVersion(key.schemaVersionKey);
          } else if (key.schemaIdVersion != null) {
            schemaVersionInfo = schemaRetriever.retrieveSchemaVersion(key.schemaIdVersion);
          } else {
            throw new IllegalArgumentException("Given argument is not valid: " + key);
          }
          updateCacheInvalidationEntries(schemaVersionInfo);
          return schemaVersionInfo;
        }
      });
}
private LoadingCache<Long, Page<T>> buildCache(int size) {
  return CacheBuilder.newBuilder().concurrencyLevel(1).initialCapacity(size).maximumSize(size)
      .removalListener(new RemovalListener<Long, Page<T>>() {
        @Override
        public void onRemoval(RemovalNotification<Long, Page<T>> notification) {
          m_recentlyExpiredPagesCache.get().put(notification.getKey(), true);
        }
      }).build(new CacheLoader<Long, Page<T>>() {
        @Override
        public Page<T> load(Long pageNo) throws Exception {
          return new Page<>(pageNo, m_pageSize, m_pageLoadIntervalMillis);
        }
      });
}
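One caveat with removal listeners such as the one above: by default they run synchronously on whichever thread happens to perform cache maintenance. The listener here is cheap, but an expensive callback should be handed to an executor via RemovalListeners.asynchronous. A minimal sketch (the key/value types are illustrative):

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalListener;
import com.google.common.cache.RemovalListeners;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class AsyncRemovalExample {
  public static void main(String[] args) {
    ExecutorService executor = Executors.newSingleThreadExecutor();
    RemovalListener<Long, String> listener =
        notification -> System.out.println("evicted " + notification.getKey());

    Cache<Long, String> cache = CacheBuilder.newBuilder()
        .maximumSize(1)
        // Callbacks are queued onto the executor instead of running on
        // whichever caller thread triggers the eviction.
        .removalListener(RemovalListeners.asynchronous(listener, executor))
        .build();

    cache.put(1L, "one");
    cache.put(2L, "two"); // exceeds maximumSize, so one entry is evicted
    executor.shutdown();  // let the queued callback drain, then exit
  }
}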
@Test
public void whenEntryIdle_thenEviction() throws InterruptedException {
  final CacheLoader<String, String> loader = new CacheLoader<String, String>() {
    @Override
    public final String load(final String key) {
      return key.toUpperCase();
    }
  };
  final LoadingCache<String, String> cache =
      CacheBuilder.newBuilder().expireAfterAccess(2, TimeUnit.MILLISECONDS).build(loader);

  cache.getUnchecked("hello");
  assertEquals(1, cache.size());
  cache.getUnchecked("hello");

  Thread.sleep(3);
  cache.getUnchecked("test");

  assertEquals(1, cache.size());
  assertNull(cache.getIfPresent("hello"));
}
private static FileChunkMeta searchFileChunkMeta(String queryId,
                                                 String ebSeqId,
                                                 String taskId,
                                                 Path outDir,
                                                 String startKey,
                                                 String endKey,
                                                 boolean last,
                                                 LoadingCache<IndexCacheKey, BSTIndexReader> indexReaderCache,
                                                 int lowCacheHitCheckThreshold) throws IOException, ExecutionException {
  SearchResult result = searchCorrespondPart(queryId, ebSeqId, outDir, startKey, endKey, last,
      indexReaderCache, lowCacheHitCheckThreshold);
  // Do not send file chunks of 0 length
  if (result != null) {
    long startOffset = result.startOffset;
    long endOffset = result.endOffset;
    FileChunkMeta chunk = new FileChunkMeta(startOffset, endOffset - startOffset, ebSeqId, taskId);
    if (LOG.isDebugEnabled()) LOG.debug("Retrieve File Chunk: " + chunk);
    return chunk;
  } else {
    return null;
  }
}
@SuppressWarnings("try")
public Source getSource(final File file) throws ExecutionException {
try (TelemetryUtils.ScopedSpan scope =
TelemetryUtils.startScopedSpan("GlobalCache.getSource")) {
TelemetryUtils.ScopedSpan.addAnnotation(
TelemetryUtils.annotationBuilder().put("file", file.getPath()).build("args"));
final LoadingCache<File, Source> sourceCache = this.getSourceCache();
return sourceCache.get(file);
}
}
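getSource propagates ExecutionException because LoadingCache.get(key), unlike getUnchecked, is the checked variant; it suits loaders that throw checked exceptions. A minimal self-contained sketch (the file-reading loader and path are illustrative):

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.concurrent.ExecutionException;

public class CheckedGetExample {
  public static void main(String[] args) {
    LoadingCache<Path, String> contents = CacheBuilder.newBuilder()
        .build(new CacheLoader<Path, String>() {
          @Override
          public String load(Path path) throws IOException {
            return new String(Files.readAllBytes(path)); // may throw a checked exception
          }
        });

    try {
      // get(...) declares ExecutionException, so checked loader failures surface
      // to the caller; getUnchecked(...) would wrap them in
      // UncheckedExecutionException instead.
      System.out.println(contents.get(Paths.get("/etc/hostname")));
    } catch (ExecutionException e) {
      System.err.println("load failed: " + e.getCause());
    }
  }
}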
@Inject
AWSEC2ImageSupplier(@Region Supplier<Set<String>> regions, @ImageQuery Map<String, String> queries,
                    @Named(PROPERTY_EC2_CC_REGIONS) String clusterRegions,
                    Supplier<LoadingCache<RegionAndName, ? extends Image>> cache, CallForImages.Factory factory,
                    @ClusterCompute Set<String> clusterComputeIds,
                    @Named(PROPERTY_USER_THREADS) ListeningExecutorService userExecutor) {
  this.factory = factory;
  this.regions = regions;
  this.queries = queries;
  this.clusterRegions = Splitter.on(',').split(clusterRegions);
  this.cache = cache;
  this.clusterComputeIds = clusterComputeIds;
  this.userExecutor = userExecutor;
}
/**
 * Builds the read context cache {@link #tableReadContextCache}.
 */
@SuppressWarnings("unchecked")
private LoadingCache<TableRuntimeContext, TableReadContext> buildReadContextCache(CacheLoader<TableRuntimeContext, TableReadContext> tableCacheLoader) {
  CacheBuilder resultSetCacheBuilder = CacheBuilder.newBuilder()
      .removalListener(new JdbcTableReadContextInvalidationListener());

  if (tableJdbcConfigBean.batchTableStrategy == BatchTableStrategy.SWITCH_TABLES) {
    if (tableJdbcConfigBean.resultCacheSize > 0) {
      resultSetCacheBuilder = resultSetCacheBuilder.maximumSize(tableJdbcConfigBean.resultCacheSize);
    }
  } else {
    resultSetCacheBuilder = resultSetCacheBuilder.maximumSize(1);
  }

  if (tableCacheLoader != null) {
    return resultSetCacheBuilder.build(tableCacheLoader);
  } else {
    return resultSetCacheBuilder.build(new JdbcTableReadContextLoader(
        connectionManager,
        offsets,
        tableJdbcConfigBean.fetchSize,
        tableJdbcConfigBean.quoteChar.getQuoteCharacter(),
        tableJdbcELEvalContext,
        isReconnect
    ));
  }
}
private LoadingCache<String, Meter> createMetricCache(String metricName) {
  return CacheBuilder.newBuilder()
      .build(new CacheLoader<String, Meter>() {
        @Override
        public Meter load(String key) throws Exception {
          String metric = new TaggedName.TaggedNameBuilder()
              .metricName(METRIC_PREFIX + metricName)
              .addTag("apiKey", key)
              .build()
              .encode();
          return _metricRegistry.meter(metric);
        }
      });
}
private static <K, V> V get(LoadingCache<K, V> cache, K key)
{
    try {
        return cache.getUnchecked(key);
    }
    catch (UncheckedExecutionException e) {
        throwIfInstanceOf(e.getCause(), PrestoException.class);
        throw e;
    }
}
@Nonnull
private static Map<BuildTarget, String> getCachedBuildTargetToLinkGroupMap(
    ImmutableList<CxxLinkGroupMapping> mapping, TargetGraph targetGraph) {
  LoadingCache<ImmutableList<CxxLinkGroupMapping>, Map<BuildTarget, String>> groupingCache =
      graphCache.getUnchecked(targetGraph);
  return groupingCache.getUnchecked(mapping);
}
@NotNull
@SuppressWarnings("unchecked")
public static <Key, Value> LoadingCache<Key, Optional<Value>> buildCache(
    CacheLoader<Key, Optional<Value>> cacheLoader,
    CacheConfig conf,
    Optional<Value> defaultValue
) {
  return new OptionalLoadingCache(
      !conf.retryOnCacheMiss,
      createBuilder(conf).build(cacheLoader),
      defaultValue
  );
}
private boolean updateQuotaFor(UUID placeId, LoadingCache<UUID, QuotaEntry> cache, QuotaEntry entry) {
  for (int i = 0; i < 10; i++) {
    QuotaEntry old = cache.getIfPresent(placeId);
    if (old == null || old.timestamp > entry.timestamp) {
      return false;
    }
    if (cache.asMap().replace(placeId, old, entry)) {
      return true;
    }
  }
  return false;
}
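The retry loop above works because cache.asMap() is a live ConcurrentMap view, so replace(key, oldValue, newValue) is an atomic compare-and-swap. A minimal sketch of the same idiom (the quota key and integer values are illustrative):

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

public class AsMapCasExample {
  public static void main(String[] args) {
    LoadingCache<String, Integer> cache = CacheBuilder.newBuilder()
        .build(CacheLoader.from((String key) -> 0));

    int old = cache.getUnchecked("quota");
    // asMap() is a live ConcurrentMap view; replace(...) only succeeds if the
    // current value still equals `old`, giving compare-and-swap semantics.
    boolean swapped = cache.asMap().replace("quota", old, old + 1);
    System.out.println(swapped + " -> " + cache.getIfPresent("quota")); // true -> 1
  }
}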
private static void incrementJobStatCounter(
    LoadingCache<String, AtomicLong> counter,
    String prefix,
    IJobKey jobKey) {
  counter.getUnchecked(prefix + JobKeys.canonicalString(jobKey)).incrementAndGet();
}
/** Creates a new {@link SemanticPredictorMetrics}. */
public static <InputT, VectorT, ValueT> SemanticPredictorMetrics<InputT, VectorT, ValueT> create(
    final SemanticMetricRegistry registry, final MetricId metricId) {
  final LoadingCache<Id, Metrics> metersCache =
      CacheBuilder.newBuilder()
          .build(
              new CacheLoader<Id, Metrics>() {
                @Override
                public Metrics load(final Id id) {
                  return Metrics.create(registry, metricId.tagged("model", id.value()));
                }
              });
  return new AutoValue_SemanticPredictorMetrics<>(metersCache);
}
static CustomPredictorMetrics create(
    final SemanticMetricRegistry registry, final MetricId metricId) {
  final LoadingCache<Model.Id, CustomMetrics> metersCache =
      CacheBuilder.newBuilder()
          .build(
              new CacheLoader<Model.Id, CustomMetrics>() {
                @Override
                public CustomMetrics load(final Model.Id id) {
                  return CustomMetrics.create(registry, metricId.tagged("model", id.value()));
                }
              });
  return new AutoValue_CustomMetricsExample_CustomPredictorMetrics(metersCache);
}
@Override
public ValueWrapper get(Object key) {
  if (this.cache instanceof LoadingCache) {
    try {
      Object value = ((LoadingCache<Object, Object>) this.cache).get(key);
      return toValueWrapper(value);
    }
    catch (ExecutionException ex) {
      throw new UncheckedExecutionException(ex.getMessage(), ex);
    }
  }
  return super.get(key);
}
/**
 * Write records to a JDBC destination using the recordWriter specified by key, and handle errors.
 *
 * @param recordIterator iterator of SDC records
 * @param key key to select the recordWriter
 * @param recordWriters JDBC record writer cache
 * @param errorRecordHandler error record handler
 * @param perRecord indicates per-record or per-batch update
 * @throws StageException
 */
public <T> void write(
    Iterator<Record> recordIterator,
    T key,
    LoadingCache<T, JdbcRecordWriter> recordWriters,
    ErrorRecordHandler errorRecordHandler,
    boolean perRecord
) throws StageException {
  final JdbcRecordWriter jdbcRecordWriter;
  try {
    jdbcRecordWriter = recordWriters.getUnchecked(key);
  } catch (UncheckedExecutionException ex) {
    final Throwable throwable = ex.getCause();
    final ErrorCode errorCode;
    final Object[] messageParams;
    if (throwable instanceof StageException) {
      StageException stageEx = (StageException) throwable;
      errorCode = stageEx.getErrorCode();
      messageParams = stageEx.getParams();
    } else {
      errorCode = JdbcErrors.JDBC_301;
      messageParams = new Object[] {ex.getMessage(), ex.getCause()};
    }
    // Failed to create RecordWriter, report all as error records.
    while (recordIterator.hasNext()) {
      Record record = recordIterator.next();
      errorRecordHandler.onError(new OnRecordErrorException(record, errorCode, messageParams));
    }
    return;
  }

  List<OnRecordErrorException> errors = perRecord
      ? jdbcRecordWriter.writePerRecord(recordIterator)
      : jdbcRecordWriter.writeBatch(recordIterator);
  for (OnRecordErrorException error : errors) {
    errorRecordHandler.onError(error);
  }
}
@VisibleForTesting
static LoadingCache<Key<PremiumListEntry>, Optional<PremiumListEntry>>
    createCachePremiumListEntries(Duration cachePersistDuration) {
  return CacheBuilder.newBuilder()
      .expireAfterWrite(cachePersistDuration.getMillis(), MILLISECONDS)
      .maximumSize(getStaticPremiumListMaxCachedEntries())
      .build(
          new CacheLoader<Key<PremiumListEntry>, Optional<PremiumListEntry>>() {
            @Override
            public Optional<PremiumListEntry> load(final Key<PremiumListEntry> entryKey) {
              return tm()
                  .doTransactionless(() -> Optional.ofNullable(ofy().load().key(entryKey).now()));
            }
          });
}
@Override
public IntCollection greaterOrEqual(int valueId, double max) {
  if (useCache) {
    GreaterOrEqualCall call = new GreaterOrEqualCall(valueId, max);
    LoadingCache<GreaterOrEqualCall, IntCollection> cache = getCache();
    return cache.getUnchecked(call);
  }
  return greaterOrEqual_(valueId, max);
}
public FsDatasetStateStore(FileSystem fs, String storeRootDir, Integer threadPoolSize,
    LoadingCache<Path, DatasetUrnStateStoreNameParser> stateStoreNameParserLoadingCache) {
  super(fs, storeRootDir, JobState.DatasetState.class);
  this.useTmpFileForPut = false;
  this.threadPoolOfGettingDatasetState = threadPoolSize;
  this.stateStoreNameParserLoadingCache = stateStoreNameParserLoadingCache;
}
@VisibleForTesting
static LoadingCache<String, Optional<PremiumList>> createCachePremiumLists(
    Duration cachePersistDuration) {
  return CacheBuilder.newBuilder()
      .expireAfterWrite(cachePersistDuration.getMillis(), MILLISECONDS)
      .build(
          new CacheLoader<String, Optional<PremiumList>>() {
            @Override
            public Optional<PremiumList> load(String premiumListName) {
              return PremiumListDao.getLatestRevision(premiumListName);
            }
          });
}
private LoadingCache<UUID, FactTypeEntity> createFactTypeByIdCache() {
  return CacheBuilder.newBuilder()
      .expireAfterAccess(10, TimeUnit.MINUTES)
      .build(new CacheLoader<UUID, FactTypeEntity>() {
        @Override
        public FactTypeEntity load(UUID key) throws Exception {
          return ObjectUtils.notNull(factTypeDao.get(key),
              new Exception(String.format("FactType with id = %s does not exist.", key)));
        }
      });
}
protected LoadingCache<NoteCoordinate, Element> getElementCache() {
  if (elementCache_ == null) {
    elementCache_ = CacheBuilder.newBuilder().maximumSize(16384).expireAfterWrite(20, TimeUnit.HOURS)
        .build(new ElementStoreCacheLoader(this));
  }
  return elementCache_;
}