Listed below are example usages of java.util.concurrent.ConcurrentHashMap#keySet(), taken from open-source projects on GitHub.
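As a quick orientation before the examples, here is a minimal, self-contained sketch (hypothetical class name, not taken from any project quoted below) of the two forms of keySet(): the plain view, which is backed by the map, and keySet(mappedValue), whose add() inserts new keys bound to that default value.
import java.util.concurrent.ConcurrentHashMap;
public class KeySetSketch {
public static void main(String[] args) {
ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
map.put("a", 1);
// keySet() is a live view: removing a key removes the mapping from the map
ConcurrentHashMap.KeySetView<String, Integer> keys = map.keySet();
keys.remove("a");
System.out.println(map.containsKey("a")); // false
// keySet(mappedValue) returns a view that also supports add();
// keys added through it are mapped to the supplied default value
ConcurrentHashMap.KeySetView<String, Integer> withDefault = map.keySet(42);
withDefault.add("b");
System.out.println(map.get("b")); // 42
}
}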
@Override
public synchronized boolean removeCookie(HttpUrl url) {
if (!cookies.containsKey(url.host())) return false;
// remove from the in-memory map
ConcurrentHashMap<String, Cookie> urlCookie = cookies.remove(url.host());
// remove from the persisted file (SharedPreferences)
Set<String> cookieTokens = urlCookie.keySet();
SharedPreferences.Editor prefsWriter = cookiePrefs.edit();
for (String cookieToken : cookieTokens) {
if (cookiePrefs.contains(COOKIE_NAME_PREFIX + cookieToken)) {
prefsWriter.remove(COOKIE_NAME_PREFIX + cookieToken);
}
}
prefsWriter.remove(url.host());
prefsWriter.apply();
return true;
}
/**
* KeySetView.getMappedValue returns the map's mapped value
*/
public void testGetMappedValue() {
ConcurrentHashMap map = map5();
assertNull(((ConcurrentHashMap.KeySetView) map.keySet()).getMappedValue());
try {
map.keySet(null);
shouldThrow();
} catch (NullPointerException success) {}
ConcurrentHashMap.KeySetView set = map.keySet(one);
assertFalse(set.add(one));
assertTrue(set.add(six));
assertTrue(set.add(seven));
assertTrue(set.getMappedValue() == one);
assertTrue(map.get(one) != one);
assertTrue(map.get(six) == one);
assertTrue(map.get(seven) == one);
}
public Set<MessageQueue> fetchMessageQueuesInBalance(String topic) throws MQClientException {
this.makeSureStateOK();
if (null == topic) {
throw new IllegalArgumentException("topic is null");
}
ConcurrentHashMap<MessageQueue, ProcessQueue> mqTable = this.rebalanceImpl.getProcessQueueTable();
Set<MessageQueue> mqResult = new HashSet<MessageQueue>();
for (MessageQueue mq : mqTable.keySet()) {
if (mq.getTopic().equals(topic)) {
mqResult.add(mq);
}
}
return mqResult;
}
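The loop above filters mqTable.keySet() by topic. The same filtering can be written over the key view with the Stream API; a method-level sketch, assuming RocketMQ's MessageQueue/ProcessQueue types and a hypothetical helper name:
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
// Hypothetical helper, not part of RocketMQ: selects the queues of one topic from the key view.
static Set<MessageQueue> queuesForTopic(ConcurrentHashMap<MessageQueue, ProcessQueue> mqTable, String topic) {
return mqTable.keySet().stream()
.filter(mq -> topic.equals(mq.getTopic()))
.collect(Collectors.toSet());
}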
@SuppressWarnings("unchecked")
@Path("/topology/{cpName}")
@GET
@Consumes(MediaType.TEXT_PLAIN)
@Produces(MediaType.APPLICATION_JSON)
public String getConnectionPoolToplogy(@PathParam("cpName") String cpName) {
TokenPoolTopology topology = MonitorConsole.getInstance().getTopology(cpName);
if (topology == null) {
return "Not Found: " + cpName;
}
ConcurrentHashMap<String, List<TokenStatus>> map = topology.getAllTokens();
JSONObject json = new JSONObject();
for (String rack : map.keySet()) {
List<TokenStatus> tokens = map.get(rack);
json.put(rack, getTokenStatusMap(tokens));
}
return json.toJSONString();
}
/**
* keySet.add adds the key with the established value to the map;
* remove removes it.
*/
public void testKeySetAddRemove() {
ConcurrentHashMap map = map5();
Set set1 = map.keySet();
Set set2 = map.keySet(true);
set2.add(six);
assertTrue(((ConcurrentHashMap.KeySetView)set2).getMap() == map);
assertTrue(((ConcurrentHashMap.KeySetView)set1).getMap() == map);
assertEquals(set2.size(), map.size());
assertEquals(set1.size(), map.size());
assertTrue((Boolean)map.get(six));
assertTrue(set1.contains(six));
assertTrue(set2.contains(six));
set2.remove(six);
assertNull(map.get(six));
assertFalse(set1.contains(six));
assertFalse(set2.contains(six));
}
/**
* keySet.toArray returns an array containing all keys
*/
public void testKeySetToArray() {
ConcurrentHashMap map = map5();
Set s = map.keySet();
Object[] ar = s.toArray();
assertTrue(s.containsAll(Arrays.asList(ar)));
assertEquals(5, ar.length);
ar[0] = m10;
assertFalse(s.containsAll(Arrays.asList(ar)));
}
protected void deleteAllMarkedPoints() {
for (final EditableDataSet dataSet : markedPoints.keySet()) {
final ConcurrentHashMap<Integer, SelectedDataPoint> dataPoints = markedPoints.get(dataSet);
for (final Integer dataPointIndex : dataPoints.keySet()) {
final SelectedDataPoint dataPoint = dataPoints.get(dataPointIndex);
if (dataPoint.delete()) {
dataPoints.remove(dataPointIndex);
}
}
}
updateMarker();
}
protected void updateMarker() {
this.markerPane.getChildren().clear();
markerPane.getParent().setMouseTransparent(false);
for (final EditableDataSet dataSet : markedPoints.keySet()) {
final ConcurrentHashMap<Integer, SelectedDataPoint> dataPoints = markedPoints.get(dataSet);
if (dataPoints == null) {
// workaround... key shouldn't be here in the first place
continue;
}
for (final Integer dataPointIndex : dataPoints.keySet()) {
final SelectedDataPoint dataPoint = dataPoints.get(dataPointIndex);
// dataPoint.setOnMouseClicked(evt -> {
// if (evt.isSecondaryButtonDown()) {
// // right clicked on circle
// }
// });
dataPoint.update();
markerPane.getChildren().add(dataPoint);
}
}
if (markerPane.getChildren().isEmpty()) {
markerPane.getParent().setMouseTransparent(true);
}
}
private void toastFaultsOrAlertsMessage(Message msg) {
StringBuilder sb = new StringBuilder();
ConcurrentHashMap<String, Object> map = (ConcurrentHashMap<String, Object>) msg.obj;
for (String key : map.keySet()) {
if ((Boolean) map.get(key)) {
sb.append(key + "1" + "\r\n");
}
}
if (sb.length() != 0) {
Toast.makeText(GosDeviceControlActivity.this, sb.toString(), Toast.LENGTH_SHORT).show();
}
}
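The method above iterates keySet() and then calls get(key) inside the loop, which looks each key up twice and needs an unchecked cast on the value; iterating entrySet() reads each mapping once. A minimal sketch of the same fault/alert collection under that assumption (hypothetical method name):
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
// Hypothetical reworking of the loop above using entrySet() instead of keySet() + get(key).
static String collectRaisedFlags(ConcurrentHashMap<String, Object> map) {
StringBuilder sb = new StringBuilder();
for (Map.Entry<String, Object> entry : map.entrySet()) {
// Boolean.TRUE.equals also guards against non-Boolean or null values
if (Boolean.TRUE.equals(entry.getValue())) {
sb.append(entry.getKey()).append("1").append("\r\n");
}
}
return sb.toString();
}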
/**
* Validate the updated metadata.
*/
@Test
public void step6_testVerifyMetadataChange() {
ConcurrentHashMap<String, XmlMetadata> fromFile = metaFileUtil.getMetadata();
Set<String> from = fromFile.keySet();
Set<String> modified = modifiedMap.keySet();
Assert.assertTrue(from.containsAll(modified));
for (String key : modified) {
Assert.assertEquals(TestConstants.getXMLMetadataString(modifiedMap.get(key)),
TestConstants.getXMLMetadataString(fromFile.get(key)));
}
}
protected TableDescriptor selectTable(ConcurrentHashMap<TableName, TableDescriptor> tableMap)
{
// synchronization to prevent removal from multiple threads
synchronized (tableMap){
// randomly select table from tableMap
if (tableMap.isEmpty()) {
return null;
}
ArrayList<TableName> tableList = new ArrayList<>(tableMap.keySet());
TableName randomKey = tableList.get(RandomUtils.nextInt(0, tableList.size()));
TableDescriptor randomTd = tableMap.remove(randomKey);
return randomTd;
}
}
/**
* Validate the content of the file.
*/
@Test
public void step3_testValidateMetadataFile() {
ConcurrentHashMap<String, XmlMetadata> fromFile = metaFileUtil.getMetadata();
Set<String> from = fromFile.keySet();
Set<String> initial = initialMap.keySet();
Assert.assertTrue(from.containsAll(initial));
for (String key : initial) {
Assert.assertEquals(TestConstants.getXMLMetadataString(initialMap.get(key)),
TestConstants.getXMLMetadataString(fromFile.get(key)));
}
}
/**
 * Description: creates a scheduled task in the cloud.
 *
 * @param date
 * execution date, formatted as "2015-01-01".
 * @param time
 * execution time, formatted as "10:10". Note: this time is in UTC!
 * @param repeat
 * repeat type. "none" means no repetition; the default execution date is the current day
 * and the execution time must not be earlier than the current time. A repeating schedule
 * is a comma-separated combination of "mon", "tue", "wed", "thu", "fri", "sat", "sun",
 * e.g. "mon,tue" repeats every Monday and Tuesday.
 * @param attrs
 * data points and values to change in the scheduled task, formatted as "attr1": val
 * @param respon
 * response listener invoked when creation succeeds or fails
 */
public void setCommadOnSite(String date, String time, String repeat, ConcurrentHashMap<String, Object> attrs,
OnResponListener respon) {
String httpurl = "http://api.gizwits.com/app/scheduler";
JSONObject jsonsend = new JSONObject();
try {
JSONObject jsonCammad = new JSONObject();
for (String key : attrs.keySet()) {
jsonCammad.put(key, attrs.get(key));
}
JSONObject task = new JSONObject();
task.put("did", device.getDid());
task.put("product_key", device.getProductKey());
task.put("attrs", jsonCammad);
JSONArray jsonArray = new JSONArray();
jsonArray.put(task);
jsonsend.put("date", date);
jsonsend.put("time", time);
jsonsend.put("repeat", repeat);
jsonsend.put("task", jsonArray);
jsonsend.put("retry_count", 3);
jsonsend.put("retry_task", "failed");
Log.i("onSite", jsonsend.toString());
} catch (JSONException e) {
e.printStackTrace();
}
sendDateToSite(httpurl, jsonsend, respon);
}
/**
* keySet returns a Set containing all the keys
*/
public void testKeySet() {
ConcurrentHashMap map = map5();
Set s = map.keySet();
assertEquals(5, s.size());
assertTrue(s.contains(one));
assertTrue(s.contains(two));
assertTrue(s.contains(three));
assertTrue(s.contains(four));
assertTrue(s.contains(five));
}
/**
 * Computes the pairwise similarity between users and returns the resulting
 * user-to-user similarity objects.
 * @param activeMap map of each user's purchase activity per second-level category
 * @return list of computed user-to-user similarity objects
 */
public static List<UserSimilarityDTO> calcSimilarityBetweenUsers(ConcurrentHashMap<Long, ConcurrentHashMap<Long, Long>> activeMap) {
// list of user-to-user similarity pairs
List<UserSimilarityDTO> similarityList = new ArrayList<UserSimilarityDTO>();
// collect all user ids (the keys of the outer map)
Set<Long> userSet = activeMap.keySet();
// copy the key set into an ArrayList so it can be indexed
List<Long> userIdList = new ArrayList<Long>(userSet);
// fewer than two users means there is at most one purchase profile, so return immediately
if (userIdList.size() < 2) {
return similarityList;
}
// compute the similarity for every pair of users
for (int i = 0; i < userIdList.size() - 1; i++) {
for (int j = i + 1; j < userIdList.size(); j++) {
// per-category click counts of the two users
ConcurrentHashMap<Long, Long> userCategory2Map = activeMap.get(userIdList.get(i));
ConcurrentHashMap<Long, Long> userRefCategory2Map = activeMap.get(userIdList.get(j));
// cosine similarity between the two users
double similarity = 0.0;
// numerator of the cosine similarity formula (dot product over categories both users clicked)
double molecule = 0.0;
// denominator of the cosine similarity formula
double denominator = 1.0;
// squared norms of the two click-count vectors
double vector1 = 0.0;
double vector2 = 0.0;
for (Long categoryId : userCategory2Map.keySet()) {
Long hits1 = userCategory2Map.get(categoryId);
vector1 += Math.pow(hits1, 2);
// only categories present for both users contribute to the dot product
Long hits2 = userRefCategory2Map.get(categoryId);
if (hits2 != null) {
molecule += hits1 * hits2;
}
}
for (Long categoryId : userRefCategory2Map.keySet()) {
vector2 += Math.pow(userRefCategory2Map.get(categoryId), 2);
}
// denominator: product of the two vector norms
denominator = Math.sqrt(vector1) * Math.sqrt(vector2);
// overall similarity (0 when either user has no clicks)
similarity = denominator == 0.0 ? 0.0 : molecule / denominator;
// build the user similarity pair object
UserSimilarityDTO userSimilarityDTO = new UserSimilarityDTO();
userSimilarityDTO.setUserId(userIdList.get(i));
userSimilarityDTO.setUserRefId(userIdList.get(j));
userSimilarityDTO.setSimilarity(similarity);
// store the computed similarity pair in the result list
similarityList.add(userSimilarityDTO);
}
}
return similarityList;
}
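For context, a hypothetical call site for the method above, building activeMap in the layout it assumes (user id -> second-level category id -> click count):
ConcurrentHashMap<Long, ConcurrentHashMap<Long, Long>> activeMap = new ConcurrentHashMap<>();
ConcurrentHashMap<Long, Long> user1Clicks = new ConcurrentHashMap<>();
user1Clicks.put(100L, 3L); // user 1 clicked category 100 three times
ConcurrentHashMap<Long, Long> user2Clicks = new ConcurrentHashMap<>();
user2Clicks.put(100L, 1L);
user2Clicks.put(200L, 2L);
activeMap.put(1L, user1Clicks);
activeMap.put(2L, user2Clicks);
// produces one UserSimilarityDTO for the pair (1, 2)
List<UserSimilarityDTO> pairs = calcSimilarityBetweenUsers(activeMap);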
public void decryptCheckThread() {
try {
while (stopRequest.getCount() > 0) {
maybeBarrier();
byte[] ref = new byte[3];
ThreadLocalRandom.current().nextBytes(ref);
ref[0] = 0;
ConcurrentHashMap<CacheTestFixtures.SentinelKey, Object> expectedDecryptMap
= possibleDecrypts.computeIfAbsent(ByteBuffer.wrap(ref),
ignored -> new ConcurrentHashMap<>());
synchronized (expectedDecryptMap) {
CryptoMaterialsCache.DecryptCacheEntry result = cache.getEntryForDecrypt(ref);
CacheTestFixtures.SentinelKey cachedKey = null;
if (result != null) {
inc("decrypt: hit");
cachedKey = (CacheTestFixtures.SentinelKey) result.getResult().getDataKey().getKey();
if (expectedDecryptMap.containsKey(cachedKey)) {
inc("decrypt: found key in expected");
} else {
fail("decrypt: unexpected key");
}
} else {
inc("decrypt: miss");
}
for (CacheTestFixtures.SentinelKey expectedKey : expectedDecryptMap.keySet()) {
if (cachedKey != expectedKey) {
inc("decrypt: prune");
expectedDecryptMap.remove(expectedKey);
}
}
}
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
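The prune loop at the end of this method removes entries from expectedDecryptMap while iterating its key view. That is safe because ConcurrentHashMap iterators are weakly consistent rather than fail-fast; a minimal, self-contained sketch of the same pattern (hypothetical class name):
import java.util.concurrent.ConcurrentHashMap;
public class PruneDuringIteration {
public static void main(String[] args) {
ConcurrentHashMap<String, Integer> map = new ConcurrentHashMap<>();
map.put("keep", 1);
map.put("dropA", 2);
map.put("dropB", 3);
// Removing while iterating the key view does not throw
// ConcurrentModificationException: the iterator is weakly consistent.
for (String key : map.keySet()) {
if (!"keep".equals(key)) {
map.remove(key);
}
}
System.out.println(map); // {keep=1}
}
}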
@Test
public void testRateLimitDoesNotExceedSuppliedQps() throws Exception {
int qps = 10;
RateLimitExecutorService service = new RateLimitExecutorService();
service.setQueriesPerSecond(qps);
final ConcurrentHashMap<Integer, Integer> executedTimestamps = new ConcurrentHashMap<>();
for (int i = 0; i < 100; i++) {
Runnable emptyTask =
new Runnable() {
@Override
public void run() {
int nearestSecond = (int) (new Date().getTime() / 1000);
if (executedTimestamps.containsKey(nearestSecond)) {
executedTimestamps.put(nearestSecond, executedTimestamps.get(nearestSecond) + 1);
} else {
executedTimestamps.put(nearestSecond, 1);
}
}
};
service.execute(emptyTask);
}
// Sleep until finished, or 20s expires (to prevent waiting forever)
long sleepTime = 0;
while (sleepTime < 20000 && countTotalRequests(executedTimestamps) < 100) {
long sleepFor = TimeUnit.SECONDS.toMillis(1);
sleepTime += sleepFor;
Thread.sleep(sleepFor);
}
// Check that we executed at the correct rate
for (Integer timestamp : executedTimestamps.keySet()) {
Integer actualQps = executedTimestamps.get(timestamp);
// Logging QPS here to detect if a previous iteration had qps-1 and this is qps+1.
LOG.info(
String.format(
"Timestamp(%d) logged %d queries (target of %d qps)", timestamp, actualQps, qps));
assertTrue(
String.format("Expected <= %d queries in a second, got %d.", qps, actualQps),
actualQps <= qps);
}
// Check that we executed every request
assertEquals(100, countTotalRequests(executedTimestamps));
service.shutdown();
}
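The containsKey/put pair inside the Runnable above is a check-then-act sequence, so two tasks that land in the same second can overwrite each other's increment; ConcurrentHashMap.merge performs the read-modify-write atomically. A minimal sketch of that alternative (illustration only, not the test's code):
import java.util.concurrent.ConcurrentHashMap;
public class CountPerSecond {
public static void main(String[] args) {
ConcurrentHashMap<Integer, Integer> executedTimestamps = new ConcurrentHashMap<>();
int nearestSecond = (int) (System.currentTimeMillis() / 1000);
// merge() increments atomically, so concurrent callers cannot lose updates
// the way an unsynchronized containsKey()/put() pair can.
executedTimestamps.merge(nearestSecond, 1, Integer::sum);
executedTimestamps.merge(nearestSecond, 1, Integer::sum);
System.out.println(executedTimestamps.get(nearestSecond)); // 2
}
}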
/**
 * @param all per-key incremental statistics to aggregate
 * @return the aggregated SimpleStats
 */
private SimpleStats aggregateResults(ConcurrentHashMap<String, IncrementalStats> all)
{
SimpleStats answer = null;
for (String key : all.keySet())
{
IncrementalStats next = all.get(key);
IncrementalStats stats = next.copy();
if (answer == null)
{
answer = new SimpleStats(stats.scale, this.infoSrv);
answer.start = stats.start;
answer.moments[0] = stats.moments[0];
answer.moments[1] = stats.moments[1];
answer.moments[2] = stats.moments[2];
answer.max = stats.max;
answer.min = stats.min;
answer.copies.put(key, stats);
}
else
{
SimpleStats newAnswer = new SimpleStats(answer.scale, this.infoSrv);
newAnswer.moments[0] = answer.moments[0] + stats.moments[0];
newAnswer.moments[1] = answer.moments[1] * answer.moments[0] + stats.moments[1] * stats.moments[0];
newAnswer.moments[1] /= answer.moments[0] + stats.moments[0];
newAnswer.moments[2] = answer.moments[2] * answer.moments[0];
newAnswer.moments[2] += (answer.moments[1] - newAnswer.moments[1]) * (answer.moments[1] - newAnswer.moments[1]) * answer.moments[0];
newAnswer.moments[2] += stats.moments[2] * stats.moments[0];
newAnswer.moments[2] += (stats.moments[1] - newAnswer.moments[1]) * (stats.moments[1] - newAnswer.moments[1]) * stats.moments[0];
newAnswer.moments[2] /= answer.moments[0] + stats.moments[0];
newAnswer.min = (stats.min < answer.min) ? stats.min : answer.min;
newAnswer.max = (stats.max > answer.max) ? stats.max : answer.max;
newAnswer.start = (stats.start.compareTo(answer.start) < 1) ? stats.start : answer.start;
newAnswer.copies.putAll(answer.copies);
newAnswer.copies.put(key, stats);
answer = newAnswer;
}
}
if (answer == null)
{
answer = new SimpleStats(1, this.infoSrv);
}
return answer;
}
/**
* The broker proactively notifies consumers that offsets need to be reset (the reset list has changed).
*/
public RemotingCommand resetOffset(String topic, String group, long timeStamp, boolean isForce) {
final RemotingCommand response = RemotingCommand.createResponseCommand(null);
TopicConfig topicConfig = this.brokerController.getTopicConfigManager().selectTopicConfig(topic);
if (null == topicConfig) {
log.error("[reset-offset] reset offset failed, no topic in this broker. topic={}", topic);
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("[reset-offset] reset offset failed, no topic in this broker. topic=" + topic);
return response;
}
Map<MessageQueue, Long> offsetTable = new HashMap<MessageQueue, Long>();
for (int i = 0; i < topicConfig.getWriteQueueNums(); i++) {
MessageQueue mq = new MessageQueue();
mq.setBrokerName(this.brokerController.getBrokerConfig().getBrokerName());
mq.setTopic(topic);
mq.setQueueId(i);
long consumerOffset =
this.brokerController.getConsumerOffsetManager().queryOffset(group, topic, i);
if (-1 == consumerOffset) {
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark(String.format("THe consumer group <%s> not exist", group));
return response;
}
long timeStampOffset =
this.brokerController.getMessageStore().getOffsetInQueueByTime(topic, i, timeStamp);
if (isForce || timeStampOffset < consumerOffset) {
offsetTable.put(mq, timeStampOffset);
}
else {
offsetTable.put(mq, consumerOffset);
}
}
ResetOffsetRequestHeader requestHeader = new ResetOffsetRequestHeader();
requestHeader.setTopic(topic);
requestHeader.setGroup(group);
requestHeader.setTimestamp(timeStamp);
RemotingCommand request =
RemotingCommand.createRequestCommand(RequestCode.RESET_CONSUMER_CLIENT_OFFSET, requestHeader);
ResetOffsetBody body = new ResetOffsetBody();
body.setOffsetTable(offsetTable);
request.setBody(body.encode());
ConsumerGroupInfo consumerGroupInfo =
this.brokerController.getConsumerManager().getConsumerGroupInfo(group);
// consumer is online
if (consumerGroupInfo != null && !consumerGroupInfo.getAllChannel().isEmpty()) {
ConcurrentHashMap<Channel, ClientChannelInfo> channelInfoTable =
consumerGroupInfo.getChannelInfoTable();
for (Channel channel : channelInfoTable.keySet()) {
int version = channelInfoTable.get(channel).getVersion();
if (version >= MQVersion.Version.V3_0_7_SNAPSHOT.ordinal()) {
try {
this.brokerController.getRemotingServer().invokeOneway(channel, request, 5000);
log.info("[reset-offset] reset offset success. topic={}, group={}, clientId={}",
new Object[] { topic, group, channelInfoTable.get(channel).getClientId() });
}
catch (Exception e) {
log.error("[reset-offset] reset offset exception. topic={}, group={}",
new Object[] { topic, group }, e);
}
}
else {
// If any client does not support this feature, return an error directly; the application side needs to upgrade.
response.setCode(ResponseCode.SYSTEM_ERROR);
response.setRemark("the client does not support this feature. version="
+ MQVersion.getVersionDesc(version));
log.warn("[reset-offset] the client does not support this feature. version={}",
RemotingHelper.parseChannelRemoteAddr(channel), MQVersion.getVersionDesc(version));
return response;
}
}
}
// consumer is not online
else {
String errorInfo = String.format(
"Consumer not online, so can not reset offset, Group: %s Topic: %s Timestamp: %d", //
requestHeader.getGroup(), //
requestHeader.getTopic(), //
requestHeader.getTimestamp());
log.error(errorInfo);
response.setCode(ResponseCode.CONSUMER_NOT_ONLINE);
response.setRemark(errorInfo);
return response;
}
response.setCode(ResponseCode.SUCCESS);
ResetOffsetBody resBody = new ResetOffsetBody();
resBody.setOffsetTable(offsetTable);
response.setBody(resBody.encode());
return response;
}