下面列出了 org.springframework.data.redis.core.HashOperations 类的 API 实例代码及用法,也可以点击链接到 GitHub 查看源代码。
/**
 * Looks up a recruit posting by id, reading the Redis hash cache first and
 * falling back to the database on a miss.
 *
 * @param id primary key of the recruit posting
 * @return the recruit entity (never null)
 * @throws MyException with RECRUIT_NOT_EXIST when the id is unknown
 */
public RecruitEntity getRecruitById(Long id) {
HashOperations<String, String, String> redisHash = redis.opsForHash();
// Single HGET instead of HEXISTS + HGET: halves the round trips and closes
// the race where the field is evicted between the two calls.
String cached = redisHash.get(Constant.RECRUIT_REDIS_PREFIX, String.valueOf(id));
if (cached != null) {
return JSONObject.parseObject(cached, RecruitEntity.class);
}
Optional<RecruitEntity> recruit = recruitRepo.findById(id);
if (!recruit.isPresent()) {
throw new MyException(ResultEnum.RECRUIT_NOT_EXIST);
}
RecruitEntity recruitRet = recruit.get();
// JSON-serialized columns must be expanded after a database read.
recruitRet.deserializeFields();
return recruitRet;
}
/**
 * Warms the Redis caches once the application is fully started: every
 * community notice and property notice is scored into a ZSET (ordered by its
 * show time) and stored as JSON in a hash keyed by its owner.
 *
 * @param event Spring ready event carrying the application context
 */
@Override
public void onApplicationEvent(ApplicationReadyEvent event) {
log.info("》》》》》》》》》》城风已就绪《《《《《《《《《《");
CommunityNoticeMapper communityNoticeMapper = event.getApplicationContext().getBean(CommunityNoticeMapper.class);
ProperNoticeMapper properNoticeMapper = event.getApplicationContext().getBean(ProperNoticeMapper.class);
StringRedisTemplate stringRedisTemplate = event.getApplicationContext().getBean(StringRedisTemplate.class);
List<CommunityNotice> communityNotices = communityNoticeMapper.selectAllCommunities();
List<ProperNotice> properNotices = properNoticeMapper.selectAllPropers();
ZSetOperations<String, String> zset = stringRedisTemplate.opsForZSet();
HashOperations<String, Object, Object> hash = stringRedisTemplate.opsForHash();
// Sequential forEach instead of parallelStream(): these are blocking Redis
// round trips, and running them on the shared ForkJoin common pool can starve
// other parallel work at startup for no measurable gain.
communityNotices.forEach(communityNotice -> {
// ZSET scores notices by show time so newest can be ranged efficiently.
zset.add(RedisConstant.COMMUNITY_NOTICE_ORDER + communityNotice.getCommunityId(),
RedisConstant.COMMUNITY_NOTICE_PREFIX + communityNotice.getId(),
new DateTime(communityNotice.getShowtime()).getMillis());
// Hash stores the full JSON payload under the same member id.
hash.put(RedisConstant.COMMUNITY_NOTICES + communityNotice.getCommunityId(),
RedisConstant.COMMUNITY_NOTICE_PREFIX + communityNotice.getId(),
JsonSerializableUtil.obj2String(communityNotice));
});
properNotices.forEach(properNotice -> {
zset.add(RedisConstant.PROPER_NOTICE_ORDER + properNotice.getUserId(),
RedisConstant.PROPER_NOTICE_PREFIX + properNotice.getId(),
new DateTime(properNotice.getShowtime()).getMillis());
hash.put(RedisConstant.PROPER_NOTICES + properNotice.getUserId(),
RedisConstant.PROPER_NOTICE_PREFIX + properNotice.getId(),
JsonSerializableUtil.obj2String(properNotice));
});
}
/**
 * Builds the shield/push status for each member of a group by bulk-reading
 * the group's setting hash: a null field maps to GROUP_STATUS_ONLINE, any
 * present value to GROUP_STATUS_SHIELD.
 *
 * @param groupId    the group whose settings are consulted
 * @param userIdList member ids, in the order results are returned
 * @return one status entry per input user id, in the same order
 */
@Override
public List<ShieldStatusEntity> getGroupPush(long groupId, List<String> userIdList) {
final String groupSetKey = RedisKeys.concat(RedisKeys.GROUP_INFO, groupId, RedisKeys.SETTING_INFO);
HashOperations<String, String, String> groupMapOps = redisTemplate.opsForHash();
// HMGET keeps the result aligned with userIdList; absent fields come back null.
List<String> statusList = groupMapOps.multiGet(groupSetKey, userIdList);
List<ShieldStatusEntity> result = new ArrayList<>(userIdList.size());
int idx = 0;
for (String userId : userIdList) {
String status = statusList.get(idx++);
ShieldStatusEntity entry = new ShieldStatusEntity();
entry.setUserId(Long.valueOf(userId));
entry.setShieldStatus(status == null ? DBConstant.GROUP_STATUS_ONLINE : DBConstant.GROUP_STATUS_SHIELD);
entry.setUserToken(userTokenService.getToken(userId));
result.add(entry);
}
return result;
}
/**
 * Handles one resume submission: records it under the sender's "sent" hash
 * and, with a computed match rate, under the posting's "received" hash.
 *
 * @param sendResume the submitted resume payload
 */
public void handleResume(SendResume sendResume) {
log.info("start handle resume");
HashOperations<String, String, String> redisHash = redis.opsForHash();
long start = System.currentTimeMillis();
Long userId = sendResume.getUserId();
Long recruitId = sendResume.getRecruitId();
// Sender side: hash keyed by user, field = recruit id, value = raw JSON.
String sendKey = Constant.getKey(RedisKeys.RESUME_SEND, String.valueOf(userId));
redisHash.put(sendKey, String.valueOf(recruitId), JSONObject.toJSONString(sendResume));
// Receiver side: hash keyed by recruit, field = user id.
String receiveKey = Constant.getKey(RedisKeys.RESUME_RECEIVE, String.valueOf(recruitId));
UserInfo userInfo = userClient.getUserInfo(userId).getData();
// Match rate between the posting's requirements and the applicant.
Recruit recruit = recruitClient.getRecruit(recruitId).getData();
int matchRate = calculate(recruit, userId);
ReceiveResume receiveResume = new ReceiveResume(sendResume.getTitle(), userInfo.getNickname(), userInfo.getUserId(), matchRate, LocalDateTime.now());
redisHash.put(receiveKey, String.valueOf(userId), JSONObject.toJSONString(receiveResume));
log.info("end handle resume spend time is " + (System.currentTimeMillis() - start));
}
/**
 * Deletes the hash entry of {@code _obj}, keyed by its primary key, from the
 * hash named after the object's simple class name.
 *
 * @param _idx redis database index to select first
 * @param _obj object whose primary key identifies the entry to remove
 * @throws Exception when no entry with that key exists, or on redis failure
 */
@Override
public void delete( int _idx, T _obj ) throws Exception{
try{
database( _idx );
HashOperations< String, ID, T > hash = tpl.opsForHash();
ID hk = _get_hk( _obj );
String sn = _obj.getClass().getSimpleName();
if( hash.hasKey( sn, hk ) ){
long num = hash.delete( sn, hk );
log.debug( "--已删除纪录:" + num + "条" );
}else
throw new Exception( "删除失败,包含主键[" + hk + "]的对象[" + sn + "]不存在!" );
}catch ( Exception e ){
// Fixed copy-paste label: this is delete, not update.
log.error( "delete 发生错误:IDX=[" + _idx + "]" + e.toString(), e );
throw e;
}
}
/**
 * Clears the unread-message counter for one of a user's sessions.
 * P2P: the per-peer unread field is deleted. Group: the group's current
 * message count is written as the user's read watermark.
 *
 * @param userCountReq 会话信息 (session type, user id, peer id)
 * @return 更新结果 (currently always null)
 * @since 1.0
 */
@PostMapping("/clearUserCounter")
public BaseModel<?> clearUserCounter(@RequestBody ClearUserCountReq userCountReq) {
HashOperations<String, String, String> hashOptions = redisTemplate.opsForHash();
if (userCountReq.getSessionType() == IMBaseDefine.SessionType.SESSION_TYPE_SINGLE) {
// Clear P2P msg Counter
final String userKey = RedisKeys.concat(RedisKeys.USER_UNREAD, userCountReq.getUserId());
hashOptions.delete(userKey, String.valueOf(userCountReq.getPeerId()));
} else if (userCountReq.getSessionType() == IMBaseDefine.SessionType.SESSION_TYPE_GROUP) {
// Clear Group msg Counter
final String groupSetKey = RedisKeys.concat(RedisKeys.GROUP_INFO, userCountReq.getPeerId(), RedisKeys.SETTING_INFO);
final String countValue = hashOptions.get(groupSetKey, RedisKeys.COUNT);
if (countValue != null) {
final String userUnreadKey = RedisKeys.concat(RedisKeys.GROUP_UNREAD, userCountReq.getUserId());
hashOptions.put(userUnreadKey, String.valueOf(userCountReq.getPeerId()), countValue);
} else {
// Guard: put() with a null value would throw. An absent COUNT field
// means the group has no counter yet, so there is nothing to clear.
logger.warn("群组计数不存在: groupId={}", userCountReq.getPeerId());
}
} else {
logger.warn("参数不正: SessionType={}", userCountReq.getSessionType());
}
return null;
}
/**
 * Looks up a recruit posting by id, reading the Redis hash cache first and
 * falling back to the database on a miss.
 *
 * @param id primary key of the recruit posting
 * @return the recruit entity (never null)
 * @throws MyException with RECRUIT_NOT_EXIST when the id is unknown
 */
public RecruitEntity getRecruitById(Long id) {
HashOperations<String, String, String> redisHash = redis.opsForHash();
// Single HGET instead of HEXISTS + HGET: halves the round trips and closes
// the race where the field is evicted between the two calls.
String cached = redisHash.get(Constant.RECRUIT_REDIS_PREFIX, String.valueOf(id));
if (cached != null) {
return JSONObject.parseObject(cached, RecruitEntity.class);
}
Optional<RecruitEntity> recruit = recruitRepo.findById(id);
if (!recruit.isPresent()) {
throw new MyException(ResultEnum.RECRUIT_NOT_EXIST);
}
RecruitEntity recruitRet = recruit.get();
// JSON-serialized columns must be expanded after a database read.
recruitRet.deserializeFields();
return recruitRet;
}
// Exercises basic HashOperations round trips against a live Redis.
// NOTE(review): raw HashOperations type — every put/get below is unchecked;
// consider HashOperations<String, String, Info> if the template's hash
// serializers support it.
@Test
public void hash() throws InterruptedException {
HashOperations hashOperations = redisTemplate.opsForHash();
Info info1 = new Info(1001, "Hong");
Info info2 = new Info(1002, "Kong");
// NOTE(review): this is RedisOperations.hasKey — a key-level EXISTS, not a
// hash-field check — and only "info_1001" gates cleanup of BOTH keys.
// Confirm the asymmetry is intended.
if (hashOperations.getOperations().hasKey("info_1001")) {
// Remove the single field from each hash before re-inserting.
hashOperations.delete("info_1001", "1001");
hashOperations.delete("info_1002", "1002");
// NOTE(review): purpose of this 3s pause is unclear — presumably to let
// the deletes settle before re-inserting; verify it is actually needed.
Thread.sleep(3000);
}
// Store each Info under its numeric id as the hash field.
hashOperations.put("info_1001", "1001", info1);
hashOperations.put("info_1002", "1002", info2);
// Read back the first entry and print it for manual inspection.
Info info = (Info) hashOperations.get("info_1001", "1001");
System.out.println();
System.out.println(info);
}
/**
 * Inserts {@code _obj} into the hash named after its simple class name,
 * failing when an entry with the same primary key already exists.
 *
 * @param _idx redis database index to select first
 * @param _obj object to store, keyed by its primary key
 * @throws Exception when the key already exists, or on redis failure
 */
@Override
public void insert ( int _idx, T _obj ) throws Exception{
try{
database( _idx );
HashOperations< String, ID, T > hash = tpl.opsForHash();
ID hk = _get_hk( _obj );
String sn = _obj.getClass().getSimpleName();
// HSETNX via putIfAbsent is atomic: the previous hasKey-then-put pair let a
// concurrent writer slip in between the check and the write.
if( !hash.putIfAbsent( sn, hk, _obj ) ){
throw new Exception( "插入失败,包含主键[" + hk + "]的对象[" + sn + "]已存在!" );
}
}catch ( Exception e ){
log.error( "insert 发生错误:IDX=[" + _idx + "]" + e.toString(), e );
throw e;
}
}
/**
 * Fetches one entity by primary key from the hash named after {@code _cls}.
 *
 * @param _idx   redis database index to select first
 * @param _value primary key to look up
 * @param _cls   entity class whose simple name is the hash key
 * @return the stored entity, or null when absent
 */
@Override
public T findById ( int _idx, ID _value, Class< T > _cls ) {
try{
database( _idx );
HashOperations< String, ID, T > hash = tpl.opsForHash();
// HashOperations is already typed to T — the old Object round-trip plus
// unchecked cast was redundant.
return hash.get( _cls.getSimpleName(), _value );
}catch ( Exception e ) {
log.error( "findById 发生错误:IDX=[" + _idx + "]" + e.toString(), e );
throw e;
}
}
/**
 * Returns recruit postings, optionally filtered by a title substring.
 * Serves from the Redis hash cache when populated; otherwise loads from the
 * database, caching the full list only for the unfiltered query.
 *
 * @param key title substring to match, or blank/null for all postings
 * @return matching recruit entities (possibly empty)
 */
private List<RecruitEntity> getAllRecruit(String key) {
HashOperations<String, String, String> redisHash = redis.opsForHash();
Map<String, String> entries = redisHash.entries(Constant.RECRUIT_REDIS_PREFIX);
if (!entries.isEmpty()) {
// Only the JSON values are used — stream values() rather than entrySet().
if (StringUtils.isBlank(key)) {
return entries
.values()
.stream()
.map(v -> JSONObject.parseObject(v, RecruitEntity.class))
.collect(Collectors.toList());
}
return entries
.values()
.stream()
.map(v -> JSONObject.parseObject(v, RecruitEntity.class))
.filter(r -> r.getTitle().contains(key))
.collect(Collectors.toList());
}
if (StringUtils.isBlank(key)) {
List<RecruitEntity> recruitList = recruitRepo.findAll();
recruitList.forEach(RecruitEntity::deserializeFields);
// Cache-miss path: store the full snapshot for subsequent reads.
cacheRecruit(recruitList);
return recruitList;
}
// 由于做全量缓存,所以模糊查询时不进行缓存 — fuzzy queries bypass the cache.
return recruitRepo.findByTitleIsLike("%" + key + "%");
}
/**
 * Writes the full recruit list into the Redis hash cache (id -> JSON) with a
 * seven-day expiry. No-op for an empty list so an empty hash never shadows
 * the database.
 *
 * @param recruitList full snapshot of recruit postings to cache
 */
private void cacheRecruit(List<RecruitEntity> recruitList) {
if (recruitList.isEmpty()) {
return;
}
HashOperations<String, String, String> redisHash = redis.opsForHash();
Map<String, String> cacheEntries = recruitList
.stream()
.collect(Collectors.toMap(r -> String.valueOf(r.getId()), JSONObject::toJSONString));
redisHash.putAll(Constant.RECRUIT_REDIS_PREFIX, cacheEntries);
// Whole-key TTL: the entire snapshot expires together after seven days.
redis.expire(Constant.RECRUIT_REDIS_PREFIX, 7, TimeUnit.DAYS);
}
/**
 * Reads the cached login token for a user from the user-info hash.
 *
 * @param userId numeric user identifier
 * @return the stored token, or null when the hash has none
 */
@Override
public String getToken(long userId) {
HashOperations<String, String, String> opsHash = redisTemplate.opsForHash();
final String userKey = RedisKeys.concat(RedisKeys.USER_INFO, userId);
return opsHash.get(userKey, RedisKeys.USER_TOKEN);
}
/**
 * Returns companies, optionally filtered by a name substring. Serves from
 * the Redis hash cache when populated; otherwise loads from the database,
 * caching the full list only for the unfiltered query.
 *
 * @param key name substring to match, or blank/null for all companies
 * @return matching companies (possibly empty)
 */
public List<CompanyEntity> getAllCompany(String key) {
HashOperations<String, String, String> redisHash = redis.opsForHash();
Map<String, String> entries = redisHash.entries(Constant.COMPANY_REDIS_PREFIX);
if (!entries.isEmpty()) {
// Only the JSON values are used — stream values() rather than entrySet().
if (StringUtils.isBlank(key)) {
return entries
.values()
.stream()
.map(v -> JSONObject.parseObject(v, CompanyEntity.class))
.collect(Collectors.toList());
}
return entries
.values()
.stream()
.map(v -> JSONObject.parseObject(v, CompanyEntity.class))
.filter(c -> c.getName().contains(key))
.collect(Collectors.toList());
}
if (StringUtils.isBlank(key)) {
List<CompanyEntity> companyList = companyRepo.findAll();
// 未命中缓存时查询到数据时插入缓存中 — repopulate the cache on a miss.
cacheCompany(companyList);
return companyList;
}
// 由于做全量缓存,所以模糊查询时不进行缓存 — fuzzy queries bypass the cache.
return companyRepo.findByNameIsLike("%" + key + "%");
}
/**
 * Fetches one company by id, reading the Redis hash cache first and falling
 * back to the database on a miss.
 *
 * @param id company primary key
 * @return the company, or null when it exists in neither cache nor database
 */
private CompanyEntity getCompanyById(Long id) {
HashOperations<String, String, String> redisHash = redis.opsForHash();
// One HGET instead of HEXISTS + HGET: fewer round trips and no window for
// the field to disappear between the two calls.
String cached = redisHash.get(Constant.COMPANY_REDIS_PREFIX, String.valueOf(id));
if (cached != null) {
return JSONObject.parseObject(cached, CompanyEntity.class);
}
return companyRepo.findById(id).orElse(null);
}
/**
 * Finds recruit postings whose title exactly matches one of the given names,
 * serving from the full Redis cache when populated, else from the database.
 *
 * @param jobList titles to match against
 * @return matching recruit entities (possibly empty)
 */
private List<RecruitEntity> findRecruitByTitleIn(List<String> jobList) {
HashOperations<String, String, String> redisHash = redis.opsForHash();
Map<String, String> entries = redisHash.entries(Constant.RECRUIT_REDIS_PREFIX);
if (entries.isEmpty()) {
return recruitRepo.findByTitleIn(jobList);
}
// Only the JSON values are used — stream values() rather than entrySet().
return entries
.values()
.stream()
.map(v -> JSONObject.parseObject(v, RecruitEntity.class))
.filter(r -> jobList.contains(r.getTitle()))
.collect(Collectors.toList());
}
/**
 * Builds the current user's list of sent resumes from the per-user Redis
 * hash of submitted applications.
 *
 * @return wrapper around the sent resumes (possibly empty)
 */
public SendInfo getSendList() {
HashOperations<String, String, String> redisHash = redis.opsForHash();
Long userId = UserHolder.get().getId();
String sendKey = Constant.getKey(RedisKeys.RESUME_SEND, String.valueOf(userId));
List<SendResume> sent = new ArrayList<>();
// Each hash value is one JSON-encoded SendResume.
for (String json : redisHash.entries(sendKey).values()) {
sent.add(JSONObject.parseObject(json, SendResume.class));
}
return new SendInfo(sent);
}
/**
 * Reads the whole hash named after {@code _cls} as a key->entity map.
 * Failures are logged and an empty map is returned instead of rethrowing.
 *
 * @param _idx redis database index to select first
 * @param _cls entity class whose simple name is the hash key
 * @return all stored entries, or an empty map on error
 */
@Override
public Map< ID, T > findHashAll( int _idx, Class< T > _cls ){
try{
database( _idx );
HashOperations< String, ID, T > ops = tpl.opsForHash();
return ops.entries( _cls.getSimpleName() );
}catch ( Exception e ){
log.error( "findHashAll 发生错误:IDX=[" + _idx + "]" + e.toString(), e );
return new HashMap<>();
}
}
/**
 * Bulk-writes every map entry into the hash named after {@code _cls}.
 *
 * @param _idx redis database index to select first
 * @param _map field -> entity entries to store
 * @param _cls entity class whose simple name is the hash key
 */
@Override
public void updateMap ( int _idx, Map< String, T > _map, Class< T > _cls ) {
try{
database( _idx );
HashOperations< String, String, T > hash = tpl.opsForHash();
// BUG FIX: _cls is already a Class object — the previous
// _cls.getClass().getSimpleName() always evaluated to "Class", so every
// map was written under the wrong hash key.
String sn = _cls.getSimpleName();
hash.putAll( sn, _map );
}catch ( Exception e ){
log.error( "updateMap 发生错误:IDX=[" + _idx + "]" + e.toString(), e );
throw e;
}
}
/**
 * Reads the cached login token for a user from the user-info hash.
 *
 * @param userId user identifier in string form
 * @return the stored token, or null when the hash has none
 */
@Override
public String getToken(String userId) {
HashOperations<String, String, String> opsHash = redisTemplate.opsForHash();
final String userKey = RedisKeys.concat(RedisKeys.USER_INFO, userId);
return opsHash.get(userKey, RedisKeys.USER_TOKEN);
}
/**
 * Upserts {@code _obj} into the hash named after its simple class name,
 * keyed by its primary key (overwrites any existing entry).
 *
 * @param _idx redis database index to select first
 * @param _obj object to store
 */
@Override
public void update ( int _idx, T _obj ){
try{
database( _idx );
HashOperations< String, ID, T > ops = tpl.opsForHash();
ops.put( _obj.getClass().getSimpleName(), _get_hk( _obj ), _obj );
}catch ( Exception e ){
log.error( "update 发生错误:IDX=[" + _idx + "]" + e.toString(), e );
throw e;
}
}
/**
 * Deletes the entry with primary key {@code _id} from the hash named after
 * {@code _cls}. Because this signature declares no checked exception,
 * failures (including a missing key) are logged and swallowed rather than
 * rethrown.
 *
 * @param _idx redis database index to select first
 * @param _id  primary key of the entry to remove
 * @param _cls entity class whose simple name is the hash key
 */
@Override
public void deleteById( int _idx, ID _id, Class< T > _cls ){
try{
database( _idx );
HashOperations< String, ID, T > hash = tpl.opsForHash();
String sn = _cls.getSimpleName();
if( hash.hasKey( sn, _id ) ){
hash.delete( sn, _id );
}else
throw new Exception( "删除失败,包含主键[" + _id + "]的对象[" + sn + "]不存在!" );
}catch ( Exception e ){
// Fixed copy-paste label: this is deleteById, not update.
log.error( "deleteById 发生错误:IDX=[" + _idx + "]" + e.toString(), e );
}
}
/**
 * Returns recruit postings, optionally filtered by a title substring.
 * Serves from the Redis hash cache when populated; otherwise loads from the
 * database, caching the full list only for the unfiltered query.
 *
 * @param key title substring to match, or blank/null for all postings
 * @return matching recruit entities (possibly empty)
 */
private List<RecruitEntity> getAllRecruit(String key) {
HashOperations<String, String, String> redisHash = redis.opsForHash();
Map<String, String> entries = redisHash.entries(Constant.RECRUIT_REDIS_PREFIX);
if (!entries.isEmpty()) {
// Only the JSON values are used — stream values() rather than entrySet().
if (StringUtils.isBlank(key)) {
return entries
.values()
.stream()
.map(v -> JSONObject.parseObject(v, RecruitEntity.class))
.collect(Collectors.toList());
}
return entries
.values()
.stream()
.map(v -> JSONObject.parseObject(v, RecruitEntity.class))
.filter(r -> r.getTitle().contains(key))
.collect(Collectors.toList());
}
if (StringUtils.isBlank(key)) {
List<RecruitEntity> recruitList = recruitRepo.findAll();
recruitList.forEach(RecruitEntity::deserializeFields);
// Cache-miss path: store the full snapshot for subsequent reads.
cacheRecruit(recruitList);
return recruitList;
}
// 由于做全量缓存,所以模糊查询时不进行缓存 — fuzzy queries bypass the cache.
return recruitRepo.findByTitleIsLike("%" + key + "%");
}
/**
 * Writes the full recruit list into the Redis hash cache (id -> JSON) with a
 * seven-day expiry. No-op for an empty list so an empty hash never shadows
 * the database.
 *
 * @param recruitList full snapshot of recruit postings to cache
 */
private void cacheRecruit(List<RecruitEntity> recruitList) {
if (recruitList.isEmpty()) {
return;
}
HashOperations<String, String, String> redisHash = redis.opsForHash();
Map<String, String> cacheEntries = recruitList
.stream()
.collect(Collectors.toMap(r -> String.valueOf(r.getId()), JSONObject::toJSONString));
redisHash.putAll(Constant.RECRUIT_REDIS_PREFIX, cacheEntries);
// Whole-key TTL: the entire snapshot expires together after seven days.
redis.expire(Constant.RECRUIT_REDIS_PREFIX, 7, TimeUnit.DAYS);
}
/**
 * Writes the full company list into the Redis hash cache (id -> JSON) with a
 * seven-day expiry. No-op for an empty list so an empty hash never shadows
 * the database.
 *
 * @param companyList full snapshot of companies to cache
 */
private void cacheCompany(List<CompanyEntity> companyList) {
if (companyList.isEmpty()) {
return;
}
HashOperations<String, String, String> redisHash = redis.opsForHash();
Map<String, String> cacheEntries = companyList
.stream()
.collect(Collectors.toMap(c -> String.valueOf(c.getId()), JSONObject::toJSONString));
redisHash.putAll(Constant.COMPANY_REDIS_PREFIX, cacheEntries);
// Whole-key TTL: the entire snapshot expires together after seven days.
redis.expire(Constant.COMPANY_REDIS_PREFIX, 7, TimeUnit.DAYS);
}
/**
 * Fetches one company by id, reading the Redis hash cache first and falling
 * back to the database on a miss.
 *
 * @param id company primary key
 * @return the company, or null when it exists in neither cache nor database
 */
private CompanyEntity getCompanyById(Long id) {
HashOperations<String, String, String> redisHash = redis.opsForHash();
// One HGET instead of HEXISTS + HGET: fewer round trips and no window for
// the field to disappear between the two calls.
String cached = redisHash.get(Constant.COMPANY_REDIS_PREFIX, String.valueOf(id));
if (cached != null) {
return JSONObject.parseObject(cached, CompanyEntity.class);
}
return companyRepo.findById(id).orElse(null);
}
/**
 * Finds recruit postings whose title exactly matches one of the given names,
 * serving from the full Redis cache when populated, else from the database.
 *
 * @param jobList titles to match against
 * @return matching recruit entities (possibly empty)
 */
private List<RecruitEntity> findRecruitByTitleIn(List<String> jobList) {
HashOperations<String, String, String> redisHash = redis.opsForHash();
Map<String, String> entries = redisHash.entries(Constant.RECRUIT_REDIS_PREFIX);
if (entries.isEmpty()) {
return recruitRepo.findByTitleIn(jobList);
}
// Only the JSON values are used — stream values() rather than entrySet().
return entries
.values()
.stream()
.map(v -> JSONObject.parseObject(v, RecruitEntity.class))
.filter(r -> jobList.contains(r.getTitle()))
.collect(Collectors.toList());
}
/**
 * Builds the current user's list of sent resumes from the per-user Redis
 * hash of submitted applications.
 *
 * @return wrapper around the sent resumes (possibly empty)
 */
public SendInfo getSendList() {
HashOperations<String, String, String> redisHash = redis.opsForHash();
Long userId = UserHolder.get().getId();
String sendKey = Constant.getKey(RedisKeys.RESUME_SEND, String.valueOf(userId));
List<SendResume> sent = new ArrayList<>();
// Each hash value is one JSON-encoded SendResume.
for (String json : redisHash.entries(sendKey).values()) {
sent.add(JSONObject.parseObject(json, SendResume.class));
}
return new SendInfo(sent);
}
/**
 * Collects every resume received across all of the current user's recruit
 * postings and returns them sorted by match rate, highest first.
 *
 * @return wrapper around the received resumes (possibly empty)
 */
public ReceiveInfo getReceiveList() {
HashOperations<String, String, String> redisHash = redis.opsForHash();
// 多个岗位需要叠加 — one received-resume hash exists per posting; merge them.
Long userId = UserHolder.get().getId();
List<Recruit> recruitList = recruitClient.getRecruitList(userId).getData();
List<ReceiveResume> received = new ArrayList<>();
for (Recruit recruit : recruitList) {
String receiveKey = Constant.getKey(RedisKeys.RESUME_RECEIVE, String.valueOf(recruit.getId()));
for (String json : redisHash.entries(receiveKey).values()) {
received.add(JSONObject.parseObject(json, ReceiveResume.class));
}
}
received.sort(Comparator.comparing(ReceiveResume::getRate).reversed());
return new ReceiveInfo(received);
}
/**
 * Reads every stored entity from the hash named after {@code _cls}.
 * Failures are logged and an empty list is returned instead of rethrowing.
 *
 * @param _idx redis database index to select first
 * @param _cls entity class whose simple name is the hash key
 * @return all stored entities, or an empty list on error
 */
@Override
public List< T > findAll( int _idx, Class< T > _cls ) {
try{
database( _idx );
HashOperations< String, ID, T > ops = tpl.opsForHash();
return ops.values( _cls.getSimpleName() );
}catch ( Exception e ){
log.error( "findAll 发生错误:IDX=[" + _idx + "]" + e.toString(), e );
return new ArrayList<>();
}
}