com.mongodb.client.MongoCursor#next() Code Examples

The following are example usages of com.mongodb.client.MongoCursor#next(); you can also follow the links to view the full source on GitHub.
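Before the project examples, here is a minimal, self-contained sketch of the typical hasNext()/next() loop with the MongoDB Java driver. The connection string, database, collection, and field names are illustrative placeholders; the try-with-resources blocks ensure both the client and the cursor are closed, something several of the examples below handle manually or omit.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import org.bson.Document;

import static com.mongodb.client.model.Filters.eq;

public class MongoCursorNextSketch {
    public static void main(String[] args) {
        // Connection string, database, collection and field are placeholders for illustration.
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> collection =
                    client.getDatabase("test").getCollection("messages");

            // MongoCursor implements Closeable, so try-with-resources closes it for us.
            try (MongoCursor<Document> cursor = collection.find(eq("status", "new")).iterator()) {
                while (cursor.hasNext()) {          // always pair hasNext() with next()
                    Document doc = cursor.next();   // advances the cursor by one document
                    System.out.println(doc.toJson());
                }
            }
        }
    }
}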

Example 1  Project: camel-quarkus   File: MongoDbResource.java
@GET
@Path("/collection/{collectionName}")
@Produces(MediaType.APPLICATION_JSON)
@SuppressWarnings("unchecked")
public JsonArray getCollection(@PathParam("collectionName") String collectionName) {
    JsonArrayBuilder arrayBuilder = Json.createArrayBuilder();

    MongoIterable<Document> iterable = producerTemplate.requestBody(
            "mongodb:camelMongoClient?database=test&collection=" + collectionName
                    + "&operation=findAll&dynamicity=true&outputType=MongoIterable",
            null, MongoIterable.class);

    MongoCursor<Document> iterator = iterable.iterator();
    while (iterator.hasNext()) {
        Document document = iterator.next();
        JsonObjectBuilder objectBuilder = Json.createObjectBuilder();
        objectBuilder.add("message", (String) document.get("message"));
        arrayBuilder.add(objectBuilder.build());
    }

    return arrayBuilder.build();
}
 
Example 2  Project: baleen   File: ReNounScoring.java
private SetMultimap<ReNounFact, ScoredPattern> getFactToScoredPatternMultimap(
    MongoCollection<Document> factsCollection,
    MongoCollection<Document> scoredPatternsCollection) {

  SetMultimap<ReNounFact, ScoredPattern> factToPatternMap = HashMultimap.create();

  MongoCursor<Document> scoredPatternsCursor = scoredPatternsCollection.find().iterator();

  while (scoredPatternsCursor.hasNext()) {
    Document scoredPatternDocument = scoredPatternsCursor.next();
    Iterator<Document> factsMatchingScoredPatternIterator =
        factsCollection
            .find(eq(PATTERN_FACT_FIELD, scoredPatternDocument.get(PATTERN_FACT_FIELD)))
            .iterator();
    while (factsMatchingScoredPatternIterator.hasNext()) {
      Document factMatchingScoredPattern = factsMatchingScoredPatternIterator.next();
      ReNounFact fact = new ReNounFact(factMatchingScoredPattern);
      ScoredPattern scoredPattern = new ScoredPattern(scoredPatternDocument);
      factToPatternMap.put(fact, scoredPattern);
    }
  }
  return factToPatternMap;
}
 
Example 3  Project: epcis   File: ChronoGraph.java
/**
 * Return a stream of all the edges in the graph. If this is not possible for
 * the implementation, then an UnsupportedOperationException can be thrown.
 *
 * @param isParallel whether to return a parallel stream
 * @return a stream of all edges in the graph
 */
public Stream<ChronoEdge> getChronoEdgeStream(boolean isParallel) {
	HashSet<ChronoEdge> ret = new HashSet<ChronoEdge>();
	MongoCursor<BsonDocument> cursor = edges.find().projection(Tokens.PRJ_ONLY_OUTV_LABEL_INV).iterator();

	while (cursor.hasNext()) {
		BsonDocument v = cursor.next();
		String outV = v.getString(Tokens.OUT_VERTEX).getValue();
		String label = v.getString(Tokens.LABEL).getValue();
		String inV = v.getString(Tokens.IN_VERTEX).getValue();
		String id = outV + "|" + label + "|" + inV;
		ret.add(new ChronoEdge(id, outV, inV, label, this));
	}
	if (isParallel)
		return ret.parallelStream();
	else
		return ret.stream();
}
 
Example 4  Project: DBus   File: MongoWrapperDefaultHandler.java
/**
 * Fetch the source document back from the database by its oid.
 *
 * @param schemaName the database (schema) name
 * @param tableName  the collection (table) name
 * @param oid        the hex string of the document's ObjectId
 * @return the matching document, or null if none was found
 */
private Document fetchData(String schemaName, String tableName, String oid) {
    Document result = null;
    DbusDatasource datasource = GlobalCache.getDatasource();
    MongoClientURI uri = new MongoClientURI(datasource.getMasterUrl());
    MongoClient client = new MongoClient(uri);
    MongoDatabase database = client.getDatabase(schemaName);
    MongoCollection<Document> collection = database.getCollection(tableName);
    MongoCursor<Document> cursor = collection.find(new BasicDBObject().append("_id", new ObjectId(oid))).iterator();
    if (cursor.hasNext()) {
        result = cursor.next();
    } else {
        logger.error("get source data error. schemaName:{}, tableName:{}, oid:{}", schemaName, tableName, oid);
    }
    client.close();
    return result;

}
 
Example 5  Project: epcis   File: ChronoGraph.java
public Stream<ChronoVertex> getChronoVertexStream(String key, Object value, boolean isParallel) {
	ElementHelper.validateProperty(null, key, value);
	HashSet<ChronoVertex> ret = new HashSet<ChronoVertex>();

	MongoCursor<BsonDocument> cursor = vertices
			.find(Tokens.FLT_VERTEX_FIELD_NOT_INCLUDED.append(key, (BsonValue) value))
			.projection(Tokens.PRJ_ONLY_ID).iterator();

	while (cursor.hasNext()) {
		BsonDocument v = cursor.next();
		ret.add(new ChronoVertex(v.getString(Tokens.ID).getValue(), this));
	}
	if (isParallel)
		return ret.parallelStream();
	else
		return ret.stream();
}
 
Example 6  Project: nifi   File: GridFSITTestBase.java
public boolean fileHasProperties(String name, String bucketName, Map<String, String> attrs) {
    GridFSBucket bucket = GridFSBuckets.create(client.getDatabase(DB), bucketName);
    MongoCursor it = bucket.find(Document.parse(String.format("{ \"filename\": \"%s\" }", name))).iterator();
    boolean retVal = false;

    if (it.hasNext()) {
        GridFSFile file = (GridFSFile)it.next();
        Document metadata = file.getMetadata();
        if (metadata != null && metadata.size() == attrs.size()) {
            retVal = true;
            for (Map.Entry<String, Object> entry : metadata.entrySet()) {
                Object val = attrs.get(entry.getKey());
                if (val == null || !entry.getValue().equals(val)) {
                    retVal = false;
                    break;
                }
            }
        }
    }

    it.close();

    return retVal;
}
 
Example 7
@Override
long executeQuery(int threadId, long threadRunCount, long globalRunCount, long selectorId, long randomId){
    final MongoCursor<Document> cursor = mongoCollection.find(eq(queriedField, selectorId)).iterator();
    //final MongoCursor<Document> cursor = mongoCollection.find(in(queriedField, selectorId, selectorId+1, selectorId+2, selectorId+3, selectorId+4)).iterator();
    long result = 0;
    try {
        while (cursor.hasNext()) {
            final Document doc = cursor.next();
            LOG.debug("Document {}", doc.toJson());
            result++;
        }
    } finally {
        cursor.close();
    }

    return result;
}
 
Example 8  Project: ByteTCC   File: MongoCompensableLock.java
private String getTransactionOwnerInMongoDB(TransactionXid transactionXid) {
	byte[] global = transactionXid.getGlobalTransactionId();
	String instanceId = ByteUtils.byteArrayToString(global);

	try {
		String application = CommonUtils.getApplication(this.endpoint);
		String databaseName = application.replaceAll("\\W", "_");
		MongoDatabase mdb = this.mongoClient.getDatabase(databaseName);
		MongoCollection<Document> collection = mdb.getCollection(CONSTANTS_TB_LOCKS);

		FindIterable<Document> findIterable = collection.find(Filters.eq(CONSTANTS_FD_GLOBAL, instanceId));
		MongoCursor<Document> cursor = findIterable.iterator();
		if (cursor.hasNext()) {
			Document document = cursor.next();
			return document.getString("identifier");
		} else {
			return null;
		}
	} catch (RuntimeException rex) {
		logger.error("Error occurred while querying the lock-owner of transaction(gxid= {}).", instanceId, rex);
		return null;
	}
}
 
Example 9  Project: epcis   File: ChronoGraph.java
/**
 * Return an iterable to all the vertices in the graph that have a particular
 * key/value property. If this is not possible for the implementation, then an
 * UnsupportedOperationException can be thrown. The graph implementation should
 * use indexing structures to make this efficient else a full vertex-filter scan
 * is required.
 * 
 * @param key   the key of the vertex
 * @param value the value of the vertex
 * @return an iterable of vertices with provided key and value
 */
public Set<ChronoVertex> getChronoVertexSet(String key, Object value) {
	ElementHelper.validateProperty(null, key, value);
	HashSet<ChronoVertex> ret = new HashSet<ChronoVertex>();

	MongoCursor<BsonDocument> cursor = vertices
			.find(Tokens.FLT_VERTEX_FIELD_NOT_INCLUDED.append(key, (BsonValue) value))
			.projection(Tokens.PRJ_ONLY_ID).iterator();

	while (cursor.hasNext()) {
		BsonDocument v = cursor.next();
		ret.add(new ChronoVertex(v.getString(Tokens.ID).getValue(), this));
	}
	return ret;
}
 
Example 10  Project: baleen   File: ReNounPatternGenerationTest.java
@Test
public void testLongerAttributes()
    throws AnalysisEngineProcessException, ResourceInitializationException {

  String initial =
      SENTENCE_1
          + SENTENCE_2
          + SENTENCE_3
          + SENTENCE_4
          + SENTENCE_5
          + SENTENCE_6
          + SENTENCE_7
          + SENTENCE_8;

  String extended =
      initial.replace("CEO", "chief executive officer").replace("Google", "Alphabet Limited");
  jCas.setDocumentText(extended);

  processJCas();

  MongoCursor<Document> found = output.find().iterator();

  assertTrue(found.hasNext());

  int count = 0;
  while (found.hasNext()) {
    count++;
    Document next = found.next();

    assertEquals("Alphabet Limited", next.get(SUBJECT_FIELD));
    assertEquals("chief executive officer", next.get(ATTRIBUTE_FIELD));
    assertEquals("Larry Page", next.get(OBJECT_FIELD));
    assertNotNull(next.get(SENTENCE_FIELD));
    assertNotNull(next.get(PATTERN_FIELD));
  }

  assertEquals(8, count);
}
 
Example 11  Project: zeppelin   File: OldMongoNotebookRepo.java
/**
 * Find documents of which type of _id is object ID, and change it to note ID.
 * Since updating _id field is not allowed, remove original documents and insert
 * new ones with string _id(note ID)
 */
private void syncId() {
  // find documents whose id type is object id
  MongoCursor<Document> cursor =  coll.find(type("_id", BsonType.OBJECT_ID)).iterator();
  // if there is no such document, exit
  if (!cursor.hasNext())
    return;

  List<ObjectId> oldDocIds = new LinkedList<>();    // document ids need to update
  List<Document> updatedDocs = new LinkedList<>();  // new documents to be inserted

  while (cursor.hasNext()) {
    Document doc = cursor.next();
    // store original _id
    ObjectId oldId = doc.getObjectId("_id");
    oldDocIds.add(oldId);
    // store the document with string _id (note id)
    String noteId = doc.getString("id");
    doc.put("_id", noteId);
    updatedDocs.add(doc);
  }

  coll.insertMany(updatedDocs);
  coll.deleteMany(in("_id", oldDocIds));

  cursor.close();
}
 
Example 12  Project: Mongodb-WeAdmin   File: MongoSdkBase.java
/**
 * Paged query.
 *
 * @param table    the collection handle
 * @param filter   filter condition, see com.mongodb.client.model.Filters
 * @param sort     sort order, see com.mongodb.client.model.Sorts
 * @param pageNum  page number (1-based)
 * @param pageSize number of documents per page
 * @return a JSONObject with paging metadata and the result list under "data"
 */
public  JSONObject getPage(MongoCollection table, Bson filter, Bson sort, int pageNum, int pageSize) {
    int totalCount = (int) (filter == null ? table.count(): table.count(filter));
    int totalPage = (int) (totalCount / pageSize + ((totalCount % pageSize == 0) ? 0 : 1));
    if (pageNum > totalPage){ pageNum = totalPage;}
    JSONObject msg = new JSONObject();
    msg.put("pageNum", pageNum);
    msg.put("pageSize", pageSize);
    msg.put("totalCount", totalCount);
    msg.put("totalPage", totalPage);
    List<JSONObject> list = new ArrayList<JSONObject>();
    if (totalCount > 0) {
        int startRow = pageNum > 0 ? (pageNum - 1) * pageSize : 0;
        FindIterable<Document> result = null;
        if (filter == null) {
            result = table.find().sort(sort).skip(startRow).limit(pageSize);
        } else {
            result = table.find(filter).sort(sort).skip(startRow).limit(pageSize);
        }
        MongoCursor<Document> iterator = result.iterator();
        while (iterator.hasNext()) {
            Document ddd = (Document) iterator.next();
            list.add(JSON.parseObject(diyObjectIdToJson(ddd)));
        }
    }
    msg.put("data", list);
    return msg;
}
 
Example 13  Project: SI   File: ResourceDAO.java
public List<ChildResourceRef> getChildResourceRefs(String keyName, String keyValue) {

		ArrayList<ChildResourceRef> refList = new ArrayList<ChildResourceRef>();

		String now = LocalDateTime.now().toString(DateTimeFormat.forPattern("yyyyMMdd'T'HHmmss"));

		DBObject c1 = new BasicDBObject(EXPIRETIME_KEY, null);
		DBObject c2 = new BasicDBObject(EXPIRETIME_KEY, new BasicDBObject("$gt", now));
		
		BasicDBList or = new BasicDBList();
		or.add(c1);
		or.add(c2);
		
		BasicDBObject query = new BasicDBObject("$or", or).append(keyName,  keyValue);

		MongoCollection<Document> collection = context.getDatabaseManager()
				.getCollection(collectionName);
		//MongoCursor<Document> cursor = collection.find(
		//		new BasicDBObject(keyName, keyValue)).iterator();
		MongoCursor<Document> cursor = collection.find(query).iterator();

		while (cursor.hasNext()) {
			Document doc = cursor.next();
			
			ChildResourceRef ref = new ChildResourceRef();
			ref.setName(doc.getString(RESNAME_KEY));
			ref.setType(doc.getInteger(RESTYPE_KEY));
			ref.setValue(doc.getString(URI_KEY));
			
			refList.add(ref);
		}

		return refList;

	}
 
Example 14  Project: epcis   File: ChronoGraph.java
/**
 * Return an iterable to all the vertices in the graph. If this is not possible
 * for the implementation, then an UnsupportedOperationException can be thrown.
 *
 * @return an iterable reference to all vertices in the graph
 */
public Iterable<ChronoVertex> getChronoVertices() {
	HashSet<String> idSet = new HashSet<String>();
	Function<BsonString, String> mapper = new Function<BsonString, String>() {
		@Override
		public String apply(BsonString val) {
			return val.getValue();
		}

	};
	HashSet<String> outV = new HashSet<String>();
	edges.distinct(Tokens.OUT_VERTEX, BsonString.class)
			.filter(new BsonDocument(Tokens.OUT_VERTEX, new BsonDocument(Tokens.FC.$ne.toString(), new BsonNull())))
			.map(mapper).into(outV);
	idSet.addAll(outV);
	HashSet<String> inV = new HashSet<String>();
	edges.distinct(Tokens.IN_VERTEX, BsonString.class)
			.filter(new BsonDocument(Tokens.IN_VERTEX, new BsonDocument(Tokens.FC.$ne.toString(), new BsonNull())))
			.map(mapper).into(inV);
	idSet.addAll(inV);

	MongoCursor<BsonDocument> vi = vertices.find(Tokens.FLT_VERTEX_FIELD_NOT_INCLUDED)
			.projection(Tokens.PRJ_ONLY_ID).iterator();
	while (vi.hasNext()) {
		BsonDocument d = vi.next();
		idSet.add(d.getString(Tokens.ID).getValue());
	}

	HashSet<String> vertex = new HashSet<String>();
	vertices.distinct(Tokens.VERTEX, BsonString.class)
			.filter(new BsonDocument(Tokens.VERTEX, new BsonDocument(Tokens.FC.$ne.toString(), new BsonNull())))
			.map(mapper).into(vertex);
	idSet.addAll(vertex);

	return idSet.parallelStream().map(s -> new ChronoVertex(s, this)).collect(Collectors.toSet());
}
 
Example 15  Project: epcis   File: ChronoGraph.java
/**
 * Return an iterable to all the edges in the graph. If this is not possible for
 * the implementation, then an UnsupportedOperationException can be thrown.
 *
 * @return an iterable reference to all edges in the graph
 */
public Set<ChronoEdge> getChronoEdgeSet() {
	HashSet<ChronoEdge> ret = new HashSet<ChronoEdge>();
	MongoCursor<BsonDocument> cursor = edges.find().projection(Tokens.PRJ_ONLY_OUTV_LABEL_INV).iterator();

	while (cursor.hasNext()) {
		BsonDocument v = cursor.next();
		String outV = v.getString(Tokens.OUT_VERTEX).getValue();
		String label = v.getString(Tokens.LABEL).getValue();
		String inV = v.getString(Tokens.IN_VERTEX).getValue();
		String id = outV + "|" + label + "|" + inV;
		ret.add(new ChronoEdge(id, outV, inV, label, this));
	}
	return ret;
}
 
Example 16  Project: epcis   File: ChronoGraph.java
/**
 * Return an iterable to all the edges in the graph that have a particular
 * key/value property. If this is not possible for the implementation, then an
 * UnsupportedOperationException can be thrown. The graph implementation should
 * use indexing structures to make this efficient else a full edge-filter scan
 * is required.
 * 
 * @param key   the key of the edge
 * @param value the value of the edge
 * @return an iterable of edges with provided key and value
 */
public Iterable<ChronoEdge> getChronoEdges(String key, Object value) {
	ElementHelper.validateProperty(null, key, value);

	HashSet<ChronoEdge> ret = new HashSet<ChronoEdge>();
	MongoCursor<BsonDocument> cursor = edges.find(Tokens.FLT_EDGE_FIELD_NOT_INCLUDED.append(key, (BsonValue) value))
			.projection(Tokens.PRJ_ONLY_ID).iterator();

	while (cursor.hasNext()) {
		BsonDocument v = cursor.next();
		ret.add(new ChronoEdge(v.getString(Tokens.ID).getValue(), this));
	}
	return ret;
}
 
Example 17  Project: SI   File: ContentInstanceAnncDAO.java
private int deleteOldestNExpired(String containerId, int size) {
		
		log.debug("deleteOldestNExpired containerId:{}, size:{}", containerId, size);
		MongoCollection<Document> collection = context.getDatabaseManager()
				.getCollection(collectionName);
		BasicDBList and = new BasicDBList();
		DeleteResult result;
		
		if (size >= 0) {
			and.clear();	
			and.add(new BasicDBObject(PARENTID_KEY, containerId));
			and.add(new BasicDBObject(RESTYPE_KEY, RESOURCE_TYPE.CONTENT_INST.Value()));
			
			MongoCursor<Document> cursor = collection.find(new BasicDBObject("$and", and))
											.sort(new BasicDBObject(CREATETIME_KEY, 1))
											.limit(size).iterator();
			
			int deletedCount = 0;
			if (cursor.hasNext()) {
				Document doc = cursor.next();
//				and.clear();
//				and.add(new BasicDBObject(PARENTID_KEY, containerId));
//				and.add(new BasicDBObject(RESTYPE_KEY, RESOURCE_TYPE.CONTENT_INST.Value()));
//				and.add(new BasicDBObject(CREATETIME_KEY, new BasicDBObject("$lt", doc.get(CREATETIME_KEY))));
//				
				result = collection.deleteOne(new BasicDBObject(RESID_KEY, doc.get(RESID_KEY)));
	
				deletedCount += result.getDeletedCount();
			}
			log.debug("Deleted oldest contentInstance:{}", deletedCount);
			return deletedCount;
		}
		return 0;
		
	}
 
Example 18  Project: epcis   File: ChronoGraph.java
/**
 * Return a stream of all the vertices in the graph. If this is not possible
 * for the implementation, then an UnsupportedOperationException can be thrown.
 *
 * @param isParallel whether to return a parallel stream
 * @return a stream of all vertices in the graph
 */
public Stream<ChronoVertex> getChronoVertexStream(boolean isParallel) {
	HashSet<String> idSet = new HashSet<String>();
	Function<BsonString, String> mapper = new Function<BsonString, String>() {
		@Override
		public String apply(BsonString val) {
			return val.getValue();
		}

	};
	HashSet<String> outV = new HashSet<String>();
	edges.distinct(Tokens.OUT_VERTEX, BsonString.class)
			.filter(new BsonDocument(Tokens.OUT_VERTEX, new BsonDocument(Tokens.FC.$ne.toString(), new BsonNull())))
			.map(mapper).into(outV);
	idSet.addAll(outV);
	HashSet<String> inV = new HashSet<String>();
	edges.distinct(Tokens.IN_VERTEX, BsonString.class)
			.filter(new BsonDocument(Tokens.IN_VERTEX, new BsonDocument(Tokens.FC.$ne.toString(), new BsonNull())))
			.map(mapper).into(inV);
	idSet.addAll(inV);

	MongoCursor<BsonDocument> vi = vertices.find(Tokens.FLT_VERTEX_FIELD_NOT_INCLUDED)
			.projection(Tokens.PRJ_ONLY_ID).iterator();
	while (vi.hasNext()) {
		BsonDocument d = vi.next();
		idSet.add(d.getString(Tokens.ID).getValue());
	}

	HashSet<String> vertex = new HashSet<String>();
	vertices.distinct(Tokens.VERTEX, BsonString.class)
			.filter(new BsonDocument(Tokens.VERTEX, new BsonDocument(Tokens.FC.$ne.toString(), new BsonNull())))
			.map(mapper).into(vertex);
	idSet.addAll(vertex);
	if (isParallel)
		return idSet.parallelStream().map(s -> new ChronoVertex(s, this)).collect(Collectors.toSet())
				.parallelStream();
	else
		return idSet.parallelStream().map(s -> new ChronoVertex(s, this)).collect(Collectors.toSet()).stream();
}
 
Example 19  Project: epcis   File: ChronoGraph.java
public ArrayList<CachedChronoVertex> getCachedChronoVertices(BsonArray filters, String sortKey, Boolean isDesc,
		Integer limit) {
	ArrayList<CachedChronoVertex> vList = new ArrayList<CachedChronoVertex>();
	// Merge All the queries with $and
	CachedChronoGraph g = new CachedChronoGraph();
	BsonDocument baseQuery = new BsonDocument();
	FindIterable<BsonDocument> cursor;
	if (filters.isEmpty() == false) {
		baseQuery.put("$and", filters);
		cursor = vertices.find(baseQuery);
	} else {
		cursor = vertices.find();
	}
	if (sortKey != null) {
		if (isDesc == null)
			cursor.sort(new BsonDocument(sortKey, new BsonInt32(-1)));
		else if (isDesc == true) {
			cursor.sort(new BsonDocument(sortKey, new BsonInt32(-1)));
		} else
			cursor.sort(new BsonDocument(sortKey, new BsonInt32(1)));
	}
	if (limit != null)
		cursor.limit(limit);

	MongoCursor<BsonDocument> iter = cursor.iterator();
	while (iter.hasNext()) {
		BsonDocument doc = iter.next();
		String vid = doc.remove("_id").asString().getValue();
		CachedChronoVertex v = g.getChronoVertex(vid);
		v.setProperties(doc);
		vList.add(v);
	}
	return vList;
}
 
Example 20  Project: SI   File: ContentInstanceDAO.java
private int deleteOldestNExpired(String containerId, int size) {
		
		log.debug("deleteOldestNExpired containerId:{}, size:{}", containerId, size);
		MongoCollection<Document> collection = context.getDatabaseManager()
				.getCollection(collectionName);
		BasicDBList and = new BasicDBList();
		DeleteResult result;
		/*
		
		String now = LocalDateTime.now().toString(DateTimeFormat.forPattern("yyyyMMdd'T'HHmmss"));
		
		and.add(new BasicDBObject(EXPIRETIME_KEY, new BasicDBObject("$lt", now)));
		and.add(new BasicDBObject(PARENTID_KEY, containerId));
		
		result = collection.deleteMany(new BasicDBObject("$and", and));
		size -= result.getDeletedCount();
		
		log.debug("Deleted expired contentInstance:{}", result.getDeletedCount());
		*/
		
		if (size >= 0) {
			and.clear();	
			and.add(new BasicDBObject(PARENTID_KEY, containerId));
			and.add(new BasicDBObject(RESTYPE_KEY, RESOURCE_TYPE.CONTENT_INST.Value()));
			
			MongoCursor<Document> cursor = collection.find(new BasicDBObject("$and", and))
											.sort(new BasicDBObject(CREATETIME_KEY, 1))
											.limit(size).iterator();
			
			int deletedCount = 0;
			if (cursor.hasNext()) {
				Document doc = cursor.next();
//				and.clear();
//				and.add(new BasicDBObject(PARENTID_KEY, containerId));
//				and.add(new BasicDBObject(RESTYPE_KEY, RESOURCE_TYPE.CONTENT_INST.Value()));
//				and.add(new BasicDBObject(CREATETIME_KEY, new BasicDBObject("$lt", doc.get(CREATETIME_KEY))));
//				
				result = collection.deleteOne(new BasicDBObject(RESID_KEY, doc.get(RESID_KEY)));
	
				deletedCount += result.getDeletedCount();
			}
			log.debug("Deleted oldest contentInstance:{}", deletedCount);
			return deletedCount;
		}
		return 0;
		
	}