下面列出了 com.google.common.collect.MapMaker API 类的实例代码及用法,也可以点击链接到 GitHub 查看完整源代码。
public void start() {
    super.start();
    // Per-client batch tracker: the computing map lazily builds a
    // MemoryClientIdentityBatch the first time a ClientIdentity is looked up.
    batches = MigrateMap.makeComputingMap(new Function<ClientIdentity, MemoryClientIdentityBatch>() {

        public MemoryClientIdentityBatch apply(ClientIdentity clientIdentity) {
            return MemoryClientIdentityBatch.create(clientIdentity);
        }
    });
    // Plain concurrent map — presumably ack/cursor positions per client; entries
    // are created explicitly by callers, not computed on demand.
    cursors = new MapMaker().makeMap();
    // destination -> list of subscribed clients; an empty list is created on first access.
    destinations = MigrateMap.makeComputingMap(new Function<String, List<ClientIdentity>>() {

        public List<ClientIdentity> apply(String destination) {
            return Lists.newArrayList();
        }
    });
}
/**
 * Adds a shop to the world. Does NOT require the chunk or world to be loaded.
 * Call shop.onLoad by yourself.
 *
 * @param world The name of the world
 * @param shop  The shop to add
 */
public void addShop(@NotNull String world, @NotNull Shop shop) {
    // Create the per-world storage map on first use and register it in the data universe.
    Map<ShopChunk, Map<Location, Shop>> inWorld =
            this.getShops().computeIfAbsent(world, k -> new MapMaker().initialCapacity(3).makeMap());
    // Chunk coordinates: block coordinate / 16 rounded toward negative infinity
    // (i.e. sequential chunk indices, NOT the location rounded to the nearest 16).
    int chunkX = (int) Math.floor((shop.getLocation().getBlockX()) / 16.0);
    int chunkZ = (int) Math.floor((shop.getLocation().getBlockZ()) / 16.0);
    ShopChunk shopChunk = new ShopChunk(world, chunkX, chunkZ);
    // Create the chunk bucket on demand, then file the shop under its exact location.
    Map<Location, Shop> inChunk =
            inWorld.computeIfAbsent(shopChunk, k -> new MapMaker().initialCapacity(1).makeMap());
    inChunk.put(shop.getLocation(), shop);
    // shop.onLoad();
}
/**
 * One-time lazy initialization of the cache and its eviction scheduler.
 * Uses double-checked locking on the static {@code init} flag.
 *
 * NOTE(review): for double-checked locking to be safe under the Java Memory
 * Model, the {@code init} flag must be declared {@code volatile} — confirm the
 * field declaration (not visible here) and add it if missing.
 */
private static void init() {
    if (!init) {
        synchronized (MetricsCache.class) {
            if (!init) {
                // Weak values let cached entries be reclaimed once no one else references them.
                cache = new MapMaker().concurrencyLevel(4).weakValues().makeMap();
                // Single daemon thread so the evictor never blocks JVM shutdown.
                scheduledExecutorService = Executors.newSingleThreadScheduledExecutor(new ThreadFactory() {
                    @Override
                    public Thread newThread(Runnable r) {
                        Thread thread = new Thread(r);
                        thread.setDaemon(true);
                        thread.setName("watcher-cache-evict-thread");
                        return thread;
                    }
                });
                // Run INSTANCE.evict() once per second (uppercase L literal — '1l' reads as '11').
                scheduledExecutorService.scheduleAtFixedRate(new Runnable() {
                    @Override
                    public void run() {
                        INSTANCE.evict();
                    }
                }, 1L, 1L, TimeUnit.SECONDS);
                // Publish the flag last, inside the lock, so other callers skip re-init.
                init = true;
            }
        }
    }
}
/**
 * Resolves {@code methodName} on {@code clazz}, caching reflective lookups in a
 * two-level map: method name -> (class -> resolved Method).
 */
private static Method getCachedMethod(Class<?> clazz, String methodName,
        Class<?>... methodParamTypes) throws NoSuchMethodException {
    Map<Class<?>, Method> byClass = mapOfMapOfMethods.get(methodName);
    if (byClass == null) {
        // First lookup for this method name: install a fresh per-class map,
        // keeping whichever map won if another thread raced us.
        Map<Class<?>, Method> fresh = new MapMaker().makeMap();
        Map<Class<?>, Method> raced = mapOfMapOfMethods.putIfAbsent(methodName, fresh);
        byClass = (raced == null) ? fresh : raced;
    }
    Method resolved = byClass.get(clazz);
    if (resolved == null) {
        resolved = clazz.getMethod(methodName, methodParamTypes);
        // NB: it doesn't matter which method we return in the event of a race.
        byClass.putIfAbsent(clazz, resolved);
    }
    return resolved;
}
@Test
public void testWeakRefsMap() throws Exception {
    // Values are weakly referenced: they live while strongly reachable and
    // become eligible for collection once the last strong reference is gone.
    ConcurrentMap<String, Object> objects = new MapMaker().weakValues().makeMap();
    objects.put("xxx", new Object());
    if (null == objects.get("xxx")) {
        Assert.fail("Reference should NOT be null");
    }
    try {
        // Apply memory pressure so the collector clears weak references.
        @SuppressWarnings("unused")
        Object[] ignored = new Object[(int) Runtime.getRuntime().maxMemory()];
    } catch (Throwable e) {
        // Ignore the expected OutOfMemoryError — or NegativeArraySizeException,
        // since maxMemory() > Integer.MAX_VALUE truncates to a negative int.
    }
    // The allocation may fail before a full GC actually ran, which made this
    // test flaky; nudge the collector explicitly before checking.
    System.gc();
    if (null != objects.get("xxx")) {
        Assert.fail("Reference should be null");
    }
}
@Test
public void whenCreateCaches_thenCreated() {
    // MapMaker produces ordinary ConcurrentMaps that can back independent caches.
    ConcurrentMap<User, Session> sessionCache = new MapMaker().makeMap();
    ConcurrentMap<User, Profile> profileCache = new MapMaker().makeMap();
    assertNotNull(sessionCache);
    assertNotNull(profileCache);

    User userA = new User(1, "UserA");

    // The same key can live in both caches with unrelated values.
    sessionCache.put(userA, new Session(100));
    profileCache.put(userA, new Profile(1000, "Personal"));

    Assert.assertThat(sessionCache.size(), equalTo(1));
    Assert.assertThat(profileCache.size(), equalTo(1));
}
public void start() {
    super.start();
    // Per-client batch tracker: the computing map lazily builds a
    // MemoryClientIdentityBatch the first time a ClientIdentity is looked up.
    batches = MigrateMap.makeComputingMap(new Function<ClientIdentity, MemoryClientIdentityBatch>() {

        public MemoryClientIdentityBatch apply(ClientIdentity clientIdentity) {
            return MemoryClientIdentityBatch.create(clientIdentity);
        }
    });
    // Plain concurrent map — presumably ack/cursor positions per client; entries
    // are created explicitly by callers, not computed on demand.
    cursors = new MapMaker().makeMap();
    // destination -> list of subscribed clients; an empty list is created on first access.
    destinations = MigrateMap.makeComputingMap(new Function<String, List<ClientIdentity>>() {

        public List<ClientIdentity> apply(String destination) {
            return Lists.newArrayList();
        }
    });
}
@Override
public void addHeaders(Map<String, String> headers) {
    // Build a fully-populated copy first, then swap the reference, so readers
    // of additionalHeaders never observe a partially-filled map.
    ConcurrentMap<String, String> copy = new MapMaker().concurrencyLevel(1).makeMap();
    copy.putAll(headers);
    this.additionalHeaders = copy;
}
@Override
public void addHeaders(Map<String, String> headers) {
    // Build a fully-populated copy first, then swap the reference, so readers
    // of additionalHeaders never observe a partially-filled map.
    ConcurrentMap<String, String> copy = new MapMaker().concurrencyLevel(1).makeMap();
    copy.putAll(headers);
    this.additionalHeaders = copy;
}
/**
 * Creates a new, empty ConcurrentHashSet.
 */
public ConcurrentHashSet() {
    // Had some really weird NPEs with Java's ConcurrentHashMap (i.e. got a
    // NPE at size()), now trying with Guava instead.
    int concurrency = Runtime.getRuntime().availableProcessors();
    delegate = new MapMaker().concurrencyLevel(concurrency).makeMap();
}
@Override
public void addHeaders(Map<String, String> headers) {
    // Build a fully-populated copy first, then swap the reference, so readers
    // of additionalHeaders never observe a partially-filled map.
    ConcurrentMap<String, String> copy = new MapMaker().concurrencyLevel(1).makeMap();
    copy.putAll(headers);
    this.additionalHeaders = copy;
}
@Override
public void addHeaders(Map<String, String> headers) {
    // Build a fully-populated copy first, then swap the reference, so readers
    // of additionalHeaders never observe a partially-filled map.
    ConcurrentMap<String, String> copy = new MapMaker().concurrencyLevel(1).makeMap();
    copy.putAll(headers);
    this.additionalHeaders = copy;
}
public static void main(String[] args) {
    // expiration(3, TimeUnit.SECONDS) sets the entry timeout to 3 seconds.
    // NOTE(review): softKeys()/expiration()/makeComputingMap() come from an old
    // Guava release; recent versions removed them in favor of CacheBuilder.
    ConcurrentMap<String , String> map = new MapMaker().concurrencyLevel(32).softKeys().weakValues()
        .expiration(3, TimeUnit.SECONDS).makeComputingMap(
            // When get() misses, this function computes a value which the map
            // automatically stores under the requested key.
            new Function<String, String>() {
                public String apply(String s) {
                    return "creating " + s + " -> Object";
                }
            }
        );
    map.put("a","testa");
    map.put("b","testb");
    System.out.println(map.get("a"));
    System.out.println(map.get("b"));
    System.out.println(map.get("c")); // miss -> value produced by the computing function
    try {
        // Sleep 4 seconds — longer than the timeout — so the entries expire.
        Thread.sleep(4000);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
    // After expiry all three lookups should go through the computing function again.
    System.out.println(map.get("a"));
    System.out.println(map.get("b"));
    System.out.println(map.get("c"));
}
public void init() {
    log.info("INIT: session clustering=" + isSessionClusteringEnabled());
    if (isSessionClusteringEnabled()) {
        // Caches used for session breakdown/rebuild when clustering is enabled.
        sessionCache = memoryService.newCache("org.sakaiproject.tool.impl.RebuildBreakdownService.cache");
        stashingCache = memoryService.newCache("org.sakaiproject.tool.impl.RebuildBreakdownService.stash");
        // Class names permitted into the session cache.
        sessionClassWhitelist = new HashSet<String>(4); // number should match items count below
        sessionClassWhitelist.add(Locale.class.getName());
        sessionClassWhitelist.add("org.sakaiproject.event.api.SimpleEvent");
        sessionClassWhitelist.add("org.sakaiproject.authz.api.SimpleRole");
        sessionClassWhitelist.add("org.apache.commons.lang.mutable.MutableLong");
        // Session attribute keys that are excluded from breakdown.
        sessionAttributeBlacklist = new HashSet<String>(6); // number should match items count below
        sessionAttributeBlacklist.add(SESSION_LAST_BREAKDOWN_KEY);
        sessionAttributeBlacklist.add(SESSION_LAST_REBUILD_KEY);
        /* from BasePreferencesService.ATTR_PREFERENCE_IS_NULL
         * This controls whether the session cached version of prefs is reloaded or assumed to be populated,
         * when it is true the processing assumes it is populated (very weird logic and dual-caching)
         */
        sessionAttributeBlacklist.add("attr_preference_is_null");
        /* from BasePreferencesService.ATTR_PREFERENCE
         * rebuild this manually on demand from the cache instead of storing it
         */
        sessionAttributeBlacklist.add("attr_preference");
        // should be re-detected on rebuild of the session
        sessionAttributeBlacklist.add("is_mobile_device");
        // this is normally only set on login, we handle it specially on breakdown and rebuild
        sessionAttributeBlacklist.add(UsageSessionService.USAGE_SESSION_KEY);
    }
    /* Create a map with weak references to the values */
    breakdownableHandlers = new MapMaker().weakValues().makeMap();
}
public void init() {
    log.info("INIT: session clustering=" + isSessionClusteringEnabled());
    if (isSessionClusteringEnabled()) {
        // Caches used for session breakdown/rebuild when clustering is enabled.
        sessionCache = memoryService.newCache("org.sakaiproject.tool.impl.RebuildBreakdownService.cache");
        stashingCache = memoryService.newCache("org.sakaiproject.tool.impl.RebuildBreakdownService.stash");
        // Class names permitted into the session cache.
        sessionClassWhitelist = new HashSet<String>(4); // number should match items count below
        sessionClassWhitelist.add(Locale.class.getName());
        sessionClassWhitelist.add("org.sakaiproject.event.api.SimpleEvent");
        sessionClassWhitelist.add("org.sakaiproject.authz.api.SimpleRole");
        sessionClassWhitelist.add("org.apache.commons.lang.mutable.MutableLong");
        // Session attribute keys that are excluded from breakdown.
        sessionAttributeBlacklist = new HashSet<String>(6); // number should match items count below
        sessionAttributeBlacklist.add(SESSION_LAST_BREAKDOWN_KEY);
        sessionAttributeBlacklist.add(SESSION_LAST_REBUILD_KEY);
        /* from BasePreferencesService.ATTR_PREFERENCE_IS_NULL
         * This controls whether the session cached version of prefs is reloaded or assumed to be populated,
         * when it is true the processing assumes it is populated (very weird logic and dual-caching)
         */
        sessionAttributeBlacklist.add("attr_preference_is_null");
        /* from BasePreferencesService.ATTR_PREFERENCE
         * rebuild this manually on demand from the cache instead of storing it
         */
        sessionAttributeBlacklist.add("attr_preference");
        // should be re-detected on rebuild of the session
        sessionAttributeBlacklist.add("is_mobile_device");
        // this is normally only set on login, we handle it specially on breakdown and rebuild
        sessionAttributeBlacklist.add(UsageSessionService.USAGE_SESSION_KEY);
    }
    /* Create a map with weak references to the values */
    breakdownableHandlers = new MapMaker().weakValues().makeMap();
}
/**
 * Builds a Set view over a weak-keyed concurrent map, so membership alone
 * never keeps an element from being garbage collected.
 */
private <T> Set<T> createRelativeSet() {
    return Sets.newSetFromMap(new MapMaker().weakKeys().<T, Boolean>makeMap());
}
@Override
public void start() {
    super.start();
    // Fresh concurrent map each (re)start, discarding any previously tracked positions.
    positions = new MapMaker().makeMap();
}
public ClasspathInferer() {
    // Weak keys (compared by identity) so the cache never pins its key objects
    // in memory — presumably Class/ClassLoader keys; verify at the field declaration.
    this.classPathCache = new MapMaker().weakKeys().makeMap();
}
/**
 * Registry wiring: a factory for model class loaders plus two weak-reference
 * maps so registrations never pin class loaders in memory.
 */
public DefaultPayloadClassLoaderRegistry(ModelClassLoaderFactory modelClassLoaderFactory) {
    this.classLoaderFactory = modelClassLoaderFactory;
    // Weak keys: entries vanish once the key object is otherwise unreachable.
    this.classLoaderDetails = new MapMaker().weakKeys().makeMap();
    // Weak values: id mappings vanish once the referenced value is collected.
    this.classLoaderIds = new MapMaker().weakValues().makeMap();
}
public ClasspathInferer() {
    // Weak keys (compared by identity) so the cache never pins its key objects
    // in memory — presumably Class/ClassLoader keys; verify at the field declaration.
    this.classPathCache = new MapMaker().weakKeys().makeMap();
}
public Counts(int initSize) {
    // Keep the configured MapMaker as a field alongside the map it built —
    // presumably so further maps with the same sizing can be created later; verify usage.
    this.mapMaker = new MapMaker().initialCapacity(initSize);
    this.counts = mapMaker.makeMap();
}
public DirtyStateManager() {
    // Concurrent map holding the resources whose dirty state this manager tracks.
    managedResources = new MapMaker().makeMap();
}
/**
 * Creates a new, empty ConcurrentHashSet backed by a weak-keyed map, so
 * elements disappear from the set once nothing else references them.
 */
public WeakConcurrentHashSet() {
    int concurrency = Runtime.getRuntime().availableProcessors();
    delegate = new MapMaker().weakKeys().concurrencyLevel(concurrency).makeMap();
}
LargeLazyStriped(int stripes, Supplier<L> supplier) {
    super(stripes);
    // 'mask' and ALL_SET come from the superclass; when every bit of the mask is
    // set the stripe count saturates at Integer.MAX_VALUE, otherwise it is mask + 1.
    this.size = (mask == ALL_SET) ? Integer.MAX_VALUE : mask + 1;
    this.supplier = supplier;
    // Weak values: lazily-created stripe locks can be reclaimed once unreferenced.
    this.locks = new MapMaker().weakValues().makeMap();
}
LargeLazyStriped(int stripes, Supplier<L> supplier) {
    super(stripes);
    // 'mask' and ALL_SET come from the superclass; when every bit of the mask is
    // set the stripe count saturates at Integer.MAX_VALUE, otherwise it is mask + 1.
    this.size = (mask == ALL_SET) ? Integer.MAX_VALUE : mask + 1;
    this.supplier = supplier;
    // Weak values: lazily-created stripe locks can be reclaimed once unreferenced.
    this.locks = new MapMaker().weakValues().makeMap();
}
LargeLazyStriped(int stripes, Supplier<L> supplier) {
    super(stripes);
    // 'mask' and ALL_SET come from the superclass; when every bit of the mask is
    // set the stripe count saturates at Integer.MAX_VALUE, otherwise it is mask + 1.
    this.size = (mask == ALL_SET) ? Integer.MAX_VALUE : mask + 1;
    this.supplier = supplier;
    // Weak values: lazily-created stripe locks can be reclaimed once unreferenced.
    this.locks = new MapMaker().weakValues().makeMap();
}
LargeLazyStriped(int stripes, Supplier<L> supplier) {
    super(stripes);
    // 'mask' and ALL_SET come from the superclass; when every bit of the mask is
    // set the stripe count saturates at Integer.MAX_VALUE, otherwise it is mask + 1.
    this.size = (mask == ALL_SET) ? Integer.MAX_VALUE : mask + 1;
    this.supplier = supplier;
    // Weak values: lazily-created stripe locks can be reclaimed once unreferenced.
    this.locks = new MapMaker().weakValues().makeMap();
}
LargeLazyStriped(int stripes, Supplier<L> supplier) {
    super(stripes);
    // 'mask' and ALL_SET come from the superclass; when every bit of the mask is
    // set the stripe count saturates at Integer.MAX_VALUE, otherwise it is mask + 1.
    this.size = (mask == ALL_SET) ? Integer.MAX_VALUE : mask + 1;
    this.supplier = supplier;
    // Weak values: lazily-created stripe locks can be reclaimed once unreferenced.
    this.locks = new MapMaker().weakValues().makeMap();
}
public StandaloneMemoryStore(KeyStore keystore, long timespan, long highwatermark, long lowwatermark) {
    this.keystore = keystore;
    this.aesKey = this.keystore.getKey(KeyStore.AES_LEVELDB_DATA);
    // High concurrency level: the series map is expected to be hit by many threads.
    this.series = new MapMaker().concurrencyLevel(64).makeMap();
    this.timespan = timespan;
    this.highwatermark = highwatermark;
    this.lowwatermark = lowwatermark;

    //
    // Add a shutdown hook to dump the memory store on exit
    //
    String storeDumpProp = WarpConfig.getProperty(io.warp10.continuum.Configuration.STANDALONE_MEMORY_STORE_DUMP);
    if (null != storeDumpProp) {
        // Capture 'this' and the dump path as finals for the anonymous hook thread.
        final StandaloneMemoryStore self = this;
        final String path = storeDumpProp;
        Thread dumphook = new Thread() {
            @Override
            public void run() {
                try {
                    self.dump(path);
                } catch (IOException ioe) {
                    ioe.printStackTrace();
                    // Rethrow so a failed dump is not silently swallowed at shutdown.
                    throw new RuntimeException(ioe);
                }
            }
        };
        Runtime.getRuntime().addShutdownHook(dumphook);
        //
        // Make sure ShutdownHookManager is initialized, otherwise it will try to
        // register a shutdown hook during the shutdown hook we just registered...
        //
        ShutdownHookManager.get();
    }

    // This object is itself a Thread (janitor): run as a low-priority daemon.
    // NOTE(review): starting a thread from the constructor lets 'this' escape
    // before construction completes — confirm no subclass relies on later init.
    this.setDaemon(true);
    this.setName("[StandaloneMemoryStore Janitor]");
    this.setPriority(Thread.MIN_PRIORITY);
    this.start();
}
public StandaloneChunkedMemoryStore(Properties properties, KeyStore keystore) {
    this.properties = properties;
    // High concurrency level: the series map is expected to be hit by many threads.
    this.series = new MapMaker().concurrencyLevel(64).makeMap();

    if ("true".equals(properties.getProperty(io.warp10.continuum.Configuration.IN_MEMORY_EPHEMERAL))) {
        // Ephemeral mode: a single, unbounded chunk.
        this.chunkcount = 1;
        this.chunkspan = Long.MAX_VALUE;
        this.ephemeral = true;
    } else {
        // Chunk layout from configuration; defaults: 3 chunks, unbounded span.
        this.chunkcount = Integer.parseInt(properties.getProperty(io.warp10.continuum.Configuration.IN_MEMORY_CHUNK_COUNT, "3"));
        this.chunkspan = Long.parseLong(properties.getProperty(io.warp10.continuum.Configuration.IN_MEMORY_CHUNK_LENGTH, Long.toString(Long.MAX_VALUE)));
        this.ephemeral = false;
    }

    // SipHash keys for hashing label and class names.
    this.labelsKeyLongs = SipHashInline.getKey(keystore.getKey(KeyStore.SIPHASH_LABELS));
    this.classKeyLongs = SipHashInline.getKey(keystore.getKey(KeyStore.SIPHASH_CLASS));

    //
    // Add a shutdown hook to dump the memory store on exit
    //
    if (null != properties.getProperty(io.warp10.continuum.Configuration.STANDALONE_MEMORY_STORE_DUMP)) {
        // Capture 'this' and the dump path as finals for the anonymous hook thread.
        final StandaloneChunkedMemoryStore self = this;
        final String path = properties.getProperty(io.warp10.continuum.Configuration.STANDALONE_MEMORY_STORE_DUMP);
        Thread dumphook = new Thread() {
            @Override
            public void run() {
                try {
                    self.dump(path);
                } catch (IOException ioe) {
                    ioe.printStackTrace();
                    // Rethrow so a failed dump is not silently swallowed at shutdown.
                    throw new RuntimeException(ioe);
                }
            }
        };
        Runtime.getRuntime().addShutdownHook(dumphook);
        //
        // Make sure ShutdownHookManager is initialized, otherwise it will try to
        // register a shutdown hook during the shutdown hook we just registered...
        //
        ShutdownHookManager.get();
    }

    // This object is itself a Thread (janitor): run as a low-priority daemon.
    // NOTE(review): starting a thread from the constructor lets 'this' escape
    // before construction completes — confirm no subclass relies on later init.
    this.setDaemon(true);
    this.setName("[StandaloneChunkedMemoryStore Janitor]");
    this.setPriority(Thread.MIN_PRIORITY);
    this.start();
}