The examples below demonstrate how to use the com.mongodb.client.model.CollationStrength API class, with sample code and usage patterns; follow the link to GitHub to view the full source.
@Test
public void testCollations() {
    getMapper().map(ContainsRenamedFields.class);
    // Two documents that differ only in case; the default (binary) collation
    // treats them as distinct values.
    getDs().save(asList(new ContainsRenamedFields("first", "last"),
        new ContainsRenamedFields("First", "Last")));

    // Fix: use the parameterized Query type instead of a raw type so the
    // compiler checks the element type.
    Query<ContainsRenamedFields> query = getDs().find(ContainsRenamedFields.class)
                                                .filter(eq("last_name", "last"));

    // Default collation is case sensitive: only the exact-case document matches.
    assertEquals(1, query.iterator().toList().size());
    // CollationStrength.SECONDARY ignores case differences, so both documents match.
    assertEquals(2, query.iterator(new FindOptions()
                                       .collation(builder()
                                                      .locale("en")
                                                      .collationStrength(CollationStrength.SECONDARY)
                                                      .build()))
                         .toList()
                         .size());

    // The same collation behavior applies to count().
    assertEquals(1, query.count());
    assertEquals(2, query.count(new CountOptions()
                                    .collation(builder()
                                                   .locale("en")
                                                   .collationStrength(CollationStrength.SECONDARY)
                                                   .build())));
}
@Test
public void testCollationsLegacyQueryApi() {
    // Renamed from testCollations: this file already defines a method with that
    // exact signature (using the newer filter()/iterator() API), and two methods
    // with the same signature in one class do not compile. This variant uses the
    // legacy field().equal()/execute() query API.
    getMapper().map(ContainsRenamedFields.class);
    getDs().save(asList(new ContainsRenamedFields("first", "last"),
        new ContainsRenamedFields("First", "Last")));

    // Fix: parameterized Query instead of a raw type.
    Query<ContainsRenamedFields> query = getDs().find(ContainsRenamedFields.class)
                                                .field("last_name").equal("last");

    // Default collation is case sensitive.
    assertEquals(1, query.execute().toList().size());
    // SECONDARY strength ignores case, matching both documents.
    assertEquals(2, query.execute(new FindOptions()
                                      .collation(builder()
                                                     .locale("en")
                                                     .collationStrength(CollationStrength.SECONDARY)
                                                     .build()))
                         .toList()
                         .size());

    assertEquals(1, query.count());
    assertEquals(2, query.count(new CountOptions()
                                    .collation(builder()
                                                   .locale("en")
                                                   .collationStrength(CollationStrength.SECONDARY)
                                                   .build())));
}
@Test
public void testDeleteWithCollation() {
    // Start from a clean collection with two users whose names differ only in case.
    getMapper().getCollection(FacebookUser.class).drop();
    getDs().save(asList(new FacebookUser(1, "John Doe"),
        new FacebookUser(2, "john doe")));

    Query<FacebookUser> query = getDs().find(FacebookUser.class)
                                       .filter(eq("username", "john doe"));

    // Default collation is case sensitive: only the exact-case document is removed.
    assertEquals(1, query.delete().getDeletedCount());

    // A SECONDARY-strength collation ignores case, so the remaining
    // "John Doe" document is matched and deleted as well.
    DeleteOptions caseInsensitive = new DeleteOptions()
        .collation(Collation.builder()
                            .locale("en")
                            .collationStrength(CollationStrength.SECONDARY)
                            .build());
    assertEquals(1, query.delete(caseInsensitive).getDeletedCount());
}
@Test
public void testFindAndDeleteWithCollation() {
    // Start from a clean collection with two users whose names differ only in case.
    getMapper().getCollection(FacebookUser.class).drop();
    getDs().save(asList(new FacebookUser(1, "John Doe"),
        new FacebookUser(2, "john doe")));

    Query<FacebookUser> query = getDs().find(FacebookUser.class)
                                       .filter(eq("username", "john doe"));

    // The first call removes the exact-case match; a second call with the
    // default (case-sensitive) collation finds nothing.
    assertNotNull(query.findAndDelete());
    assertNull(query.findAndDelete());

    // With a SECONDARY-strength (case-insensitive) collation, "John Doe" matches.
    FindAndDeleteOptions caseInsensitive = new FindAndDeleteOptions()
        .collation(Collation.builder()
                            .locale("en")
                            .collationStrength(CollationStrength.SECONDARY)
                            .build());
    assertNotNull(query.findAndDelete(caseInsensitive));

    // Both documents are now gone.
    assertNull(query.iterator().tryNext());
}
/**
 * Parses a JSON collation specification into a driver {@link Collation}.
 * Only keys present in the document are applied to the builder; unrecognized
 * keys are ignored.
 *
 * @param collationString the collation document as a JSON string; may be empty
 * @return the parsed collation, or {@link Optional#empty()} when the input is empty
 */
public static Optional<Collation> collationFromJson(final String collationString) {
    if (collationString.isEmpty()) {
        return Optional.empty();
    }
    final Document doc = Document.parse(collationString);
    final Collation.Builder collation = Collation.builder();
    if (doc.containsKey("locale")) {
        collation.locale(doc.getString("locale"));
    }
    if (doc.containsKey("caseLevel")) {
        collation.caseLevel(doc.getBoolean("caseLevel"));
    }
    if (doc.containsKey("caseFirst")) {
        collation.collationCaseFirst(
            CollationCaseFirst.fromString(doc.getString("caseFirst")));
    }
    if (doc.containsKey("strength")) {
        collation.collationStrength(CollationStrength.fromInt(doc.getInteger("strength")));
    }
    if (doc.containsKey("numericOrdering")) {
        collation.numericOrdering(doc.getBoolean("numericOrdering"));
    }
    if (doc.containsKey("alternate")) {
        collation.collationAlternate(
            CollationAlternate.fromString(doc.getString("alternate")));
    }
    if (doc.containsKey("maxVariable")) {
        collation.collationMaxVariable(
            CollationMaxVariable.fromString(doc.getString("maxVariable")));
    }
    if (doc.containsKey("normalization")) {
        collation.normalization(doc.getBoolean("normalization"));
    }
    if (doc.containsKey("backwards")) {
        collation.backwards(doc.getBoolean("backwards"));
    }
    return Optional.of(collation.build());
}
@Test
public void testUpdateWithCollation() {
    // Start from a clean collection with two users whose names differ only in case.
    getMapper().getCollection(FacebookUser.class).drop();
    getDs().save(asList(new FacebookUser(1, "John Doe"),
        new FacebookUser(2, "john doe")));

    final Update<FacebookUser> incLoginCount = getDs().find(FacebookUser.class)
                                                      .filter(eq("username", "john doe"))
                                                      .update(inc("loginCount"));

    // Default (case-sensitive) collation: only user id=2 ("john doe") is updated.
    UpdateResult result = incLoginCount.execute();
    assertEquals(1, result.getModifiedCount());
    assertEquals(0, getDs().find(FacebookUser.class).filter(eq("id", 1)).iterator(new FindOptions().limit(1)).next()
                           .loginCount);
    assertEquals(1, getDs().find(FacebookUser.class).filter(eq("id", 2)).iterator(new FindOptions().limit(1))
                           .next()
                           .loginCount);

    // SECONDARY-strength collation with multi(true): both documents match and
    // are incremented.
    result = incLoginCount.execute(new UpdateOptions()
                                       .multi(true)
                                       .collation(Collation.builder()
                                                           .locale("en")
                                                           .collationStrength(CollationStrength.SECONDARY)
                                                           .build()));
    assertEquals(2, result.getModifiedCount());
    assertEquals(1, getDs().find(FacebookUser.class).filter(eq("id", 1)).iterator(new FindOptions().limit(1))
                           .next()
                           .loginCount);
    assertEquals(2, getDs().find(FacebookUser.class).filter(eq("id", 2)).iterator(new FindOptions().limit(1))
                           .next()
                           .loginCount);
}
@Test
@DisplayName("test creates the expected collection cursor")
void testCreatesExpectedCollectionCursor() {
// Verifies MongoSourceTask.createCursor() builds a collection-level change
// stream in three configurations: defaults, with a pipeline, and "complex"
// (batch size, fullDocument, collation, and a stored resume offset).
MongoSourceTask task = new MongoSourceTask();
Map<String, String> cfgMap = new HashMap<>();
cfgMap.put(CONNECTION_URI_CONFIG, "mongodb://localhost");
cfgMap.put(DATABASE_CONFIG, TEST_DATABASE);
cfgMap.put(COLLECTION_CONFIG, TEST_COLLECTION);
MongoSourceConfig cfg = new MongoSourceConfig(cfgMap);
// Stub the mock driver chain: client -> database -> collection -> watch()
// -> withDocumentClass -> iterator, so no live server is needed.
when(mongoClient.getDatabase(TEST_DATABASE)).thenReturn(mongoDatabase);
when(mongoDatabase.getCollection(TEST_COLLECTION)).thenReturn(mongoCollection);
when(mongoCollection.watch()).thenReturn(changeStreamIterable);
when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
when(mongoIterable.iterator()).thenReturn(mongoCursor);
task.createCursor(cfg, mongoClient);
verify(mongoClient, times(1)).getDatabase(TEST_DATABASE);
verify(mongoDatabase, times(1)).getCollection(TEST_COLLECTION);
verify(mongoCollection, times(1)).watch();
verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
verify(mongoIterable, times(1)).iterator();
// Pipeline
// With PIPELINE_CONFIG set, watch(pipeline) must be called instead of watch().
resetMocks();
cfgMap.put(PIPELINE_CONFIG, "[{$match: {operationType: 'insert'}}]");
cfg = new MongoSourceConfig(cfgMap);
when(mongoClient.getDatabase(TEST_DATABASE)).thenReturn(mongoDatabase);
when(mongoDatabase.getCollection(TEST_COLLECTION)).thenReturn(mongoCollection);
when(mongoCollection.watch(cfg.getPipeline().get())).thenReturn(changeStreamIterable);
when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
when(mongoIterable.iterator()).thenReturn(mongoCursor);
task.createCursor(cfg, mongoClient);
verify(mongoClient, times(1)).getDatabase(TEST_DATABASE);
verify(mongoDatabase, times(1)).getCollection(TEST_COLLECTION);
verify(mongoCollection, times(1)).watch(cfg.getPipeline().get());
verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
verify(mongoIterable, times(1)).iterator();
// Complex
resetMocks();
// Note: cfgMap still carries the pipeline entry from the previous section,
// so the "complex" config includes the pipeline as well.
cfgMap.put(BATCH_SIZE_CONFIG, "101");
FullDocument fullDocument = FullDocument.UPDATE_LOOKUP;
cfgMap.put(FULL_DOCUMENT_CONFIG, fullDocument.getValue());
// A collation exercising every builder option; round-tripped through the
// JSON-valued COLLATION_CONFIG property.
Collation collation =
Collation.builder()
.locale("en")
.caseLevel(true)
.collationCaseFirst(CollationCaseFirst.OFF)
.collationStrength(CollationStrength.IDENTICAL)
.collationAlternate(CollationAlternate.SHIFTED)
.collationMaxVariable(CollationMaxVariable.SPACE)
.numericOrdering(true)
.normalization(true)
.backwards(true)
.build();
cfgMap.put(COLLATION_CONFIG, collation.asDocument().toJson());
cfg = new MongoSourceConfig(cfgMap);
// A stored offset (OFFSET containing RESUME_TOKEN) makes the task resume the
// change stream via startAfter().
task.initialize(context);
when(context.offsetStorageReader()).thenReturn(offsetStorageReader);
when(offsetStorageReader.offset(task.createPartitionMap(cfg))).thenReturn(OFFSET);
when(mongoClient.getDatabase(TEST_DATABASE)).thenReturn(mongoDatabase);
when(mongoDatabase.getCollection(TEST_COLLECTION)).thenReturn(mongoCollection);
when(mongoCollection.watch(cfg.getPipeline().get())).thenReturn(changeStreamIterable);
when(changeStreamIterable.batchSize(101)).thenReturn(changeStreamIterable);
when(changeStreamIterable.fullDocument(fullDocument)).thenReturn(changeStreamIterable);
when(changeStreamIterable.collation(collation)).thenReturn(changeStreamIterable);
when(changeStreamIterable.startAfter(RESUME_TOKEN)).thenReturn(changeStreamIterable);
when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
when(mongoIterable.iterator()).thenReturn(mongoCursor);
task.createCursor(cfg, mongoClient);
verify(mongoClient, times(1)).getDatabase(TEST_DATABASE);
verify(mongoDatabase, times(1)).getCollection(TEST_COLLECTION);
verify(mongoCollection, times(1)).watch(cfg.getPipeline().get());
verify(changeStreamIterable, times(1)).batchSize(101);
verify(changeStreamIterable, times(1)).fullDocument(fullDocument);
verify(changeStreamIterable, times(1)).collation(collation);
verify(changeStreamIterable, times(1)).startAfter(RESUME_TOKEN);
verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
verify(mongoIterable, times(1)).iterator();
}
@Test
@DisplayName("test creates the expected database cursor")
void testCreatesExpectedDatabaseCursor() {
// Same three scenarios as the collection-cursor test, but at database scope:
// the change stream comes from mongoDatabase.watch(...) (no collection config).
MongoSourceTask task = new MongoSourceTask();
Map<String, String> cfgMap = new HashMap<>();
cfgMap.put(CONNECTION_URI_CONFIG, "mongodb://localhost");
cfgMap.put(DATABASE_CONFIG, TEST_DATABASE);
MongoSourceConfig cfg = new MongoSourceConfig(cfgMap);
// Stub the mock driver chain: client -> database -> watch() -> withDocumentClass -> iterator.
when(mongoClient.getDatabase(TEST_DATABASE)).thenReturn(mongoDatabase);
when(mongoDatabase.watch()).thenReturn(changeStreamIterable);
when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
when(mongoIterable.iterator()).thenReturn(mongoCursor);
task.createCursor(cfg, mongoClient);
verify(mongoClient, times(1)).getDatabase(TEST_DATABASE);
verify(mongoDatabase, times(1)).watch();
verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
verify(mongoIterable, times(1)).iterator();
// Pipeline
// With PIPELINE_CONFIG set, watch(pipeline) must be called instead of watch().
resetMocks();
cfgMap.put(PIPELINE_CONFIG, "[{$match: {operationType: 'insert'}}]");
cfg = new MongoSourceConfig(cfgMap);
when(mongoClient.getDatabase(TEST_DATABASE)).thenReturn(mongoDatabase);
when(mongoDatabase.watch(cfg.getPipeline().get())).thenReturn(changeStreamIterable);
when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
when(mongoIterable.iterator()).thenReturn(mongoCursor);
task.createCursor(cfg, mongoClient);
verify(mongoClient, times(1)).getDatabase(TEST_DATABASE);
verify(mongoDatabase, times(1)).watch(cfg.getPipeline().get());
verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
verify(mongoIterable, times(1)).iterator();
// Complex
resetMocks();
// cfgMap still contains the pipeline entry, so it is part of this config too.
cfgMap.put(BATCH_SIZE_CONFIG, "101");
FullDocument fullDocument = FullDocument.UPDATE_LOOKUP;
cfgMap.put(FULL_DOCUMENT_CONFIG, fullDocument.getValue());
// A collation exercising every builder option; round-tripped through the
// JSON-valued COLLATION_CONFIG property.
Collation collation =
Collation.builder()
.locale("en")
.caseLevel(true)
.collationCaseFirst(CollationCaseFirst.OFF)
.collationStrength(CollationStrength.IDENTICAL)
.collationAlternate(CollationAlternate.SHIFTED)
.collationMaxVariable(CollationMaxVariable.SPACE)
.numericOrdering(true)
.normalization(true)
.backwards(true)
.build();
cfgMap.put(COLLATION_CONFIG, collation.asDocument().toJson());
cfg = new MongoSourceConfig(cfgMap);
// A stored offset (OFFSET containing RESUME_TOKEN) makes the task resume the
// change stream via startAfter().
task.initialize(context);
when(context.offsetStorageReader()).thenReturn(offsetStorageReader);
when(offsetStorageReader.offset(task.createPartitionMap(cfg))).thenReturn(OFFSET);
when(mongoClient.getDatabase(TEST_DATABASE)).thenReturn(mongoDatabase);
when(mongoDatabase.watch(cfg.getPipeline().get())).thenReturn(changeStreamIterable);
when(changeStreamIterable.batchSize(101)).thenReturn(changeStreamIterable);
when(changeStreamIterable.fullDocument(fullDocument)).thenReturn(changeStreamIterable);
when(changeStreamIterable.collation(collation)).thenReturn(changeStreamIterable);
when(changeStreamIterable.startAfter(RESUME_TOKEN)).thenReturn(changeStreamIterable);
when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
when(mongoIterable.iterator()).thenReturn(mongoCursor);
task.createCursor(cfg, mongoClient);
verify(mongoClient, times(1)).getDatabase(TEST_DATABASE);
verify(mongoDatabase, times(1)).watch(cfg.getPipeline().get());
verify(changeStreamIterable, times(1)).batchSize(101);
verify(changeStreamIterable, times(1)).fullDocument(fullDocument);
verify(changeStreamIterable, times(1)).collation(collation);
verify(changeStreamIterable, times(1)).startAfter(RESUME_TOKEN);
verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
verify(mongoIterable, times(1)).iterator();
}
@Test
@DisplayName("test creates the expected client cursor")
void testCreatesExpectedClientCursor() {
// Same three scenarios again, but at client (deployment) scope: no database
// or collection configured, so the stream comes from mongoClient.watch(...).
MongoSourceTask task = new MongoSourceTask();
Map<String, String> cfgMap = new HashMap<>();
cfgMap.put(CONNECTION_URI_CONFIG, "mongodb://localhost");
MongoSourceConfig cfg = new MongoSourceConfig(cfgMap);
// Stub the mock driver chain: client.watch() -> withDocumentClass -> iterator.
when(mongoClient.watch()).thenReturn(changeStreamIterable);
when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
when(mongoIterable.iterator()).thenReturn(mongoCursor);
task.createCursor(cfg, mongoClient);
verify(mongoClient, times(1)).watch();
verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
verify(mongoIterable, times(1)).iterator();
// Pipeline
// With PIPELINE_CONFIG set, watch(pipeline) must be called instead of watch().
resetMocks();
cfgMap.put(PIPELINE_CONFIG, "[{$match: {operationType: 'insert'}}]");
cfg = new MongoSourceConfig(cfgMap);
when(mongoClient.watch(cfg.getPipeline().get())).thenReturn(changeStreamIterable);
when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
when(mongoIterable.iterator()).thenReturn(mongoCursor);
task.createCursor(cfg, mongoClient);
verify(mongoClient, times(1)).watch(cfg.getPipeline().get());
verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
verify(mongoIterable, times(1)).iterator();
// Complex
resetMocks();
// cfgMap still contains the pipeline entry, so it is part of this config too.
cfgMap.put(BATCH_SIZE_CONFIG, "101");
FullDocument fullDocument = FullDocument.UPDATE_LOOKUP;
cfgMap.put(FULL_DOCUMENT_CONFIG, fullDocument.getValue());
// A collation exercising every builder option; round-tripped through the
// JSON-valued COLLATION_CONFIG property.
Collation collation =
Collation.builder()
.locale("en")
.caseLevel(true)
.collationCaseFirst(CollationCaseFirst.OFF)
.collationStrength(CollationStrength.IDENTICAL)
.collationAlternate(CollationAlternate.SHIFTED)
.collationMaxVariable(CollationMaxVariable.SPACE)
.numericOrdering(true)
.normalization(true)
.backwards(true)
.build();
cfgMap.put(COLLATION_CONFIG, collation.asDocument().toJson());
cfg = new MongoSourceConfig(cfgMap);
// A stored offset (OFFSET containing RESUME_TOKEN) makes the task resume the
// change stream via startAfter().
task.initialize(context);
when(context.offsetStorageReader()).thenReturn(offsetStorageReader);
when(offsetStorageReader.offset(task.createPartitionMap(cfg))).thenReturn(OFFSET);
when(mongoClient.watch(cfg.getPipeline().get())).thenReturn(changeStreamIterable);
when(changeStreamIterable.batchSize(101)).thenReturn(changeStreamIterable);
when(changeStreamIterable.fullDocument(fullDocument)).thenReturn(changeStreamIterable);
when(changeStreamIterable.collation(collation)).thenReturn(changeStreamIterable);
when(changeStreamIterable.startAfter(RESUME_TOKEN)).thenReturn(changeStreamIterable);
when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
when(mongoIterable.iterator()).thenReturn(mongoCursor);
task.createCursor(cfg, mongoClient);
verify(mongoClient, times(1)).watch(cfg.getPipeline().get());
verify(changeStreamIterable, times(1)).batchSize(101);
verify(changeStreamIterable, times(1)).fullDocument(fullDocument);
verify(changeStreamIterable, times(1)).collation(collation);
verify(changeStreamIterable, times(1)).startAfter(RESUME_TOKEN);
verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
verify(mongoIterable, times(1)).iterator();
}
/**
 * Converts a raw index-options {@link Document} into the driver's
 * {@link IndexOptions}, copying only the keys that are present in the
 * document. Keys not listed here are silently ignored.
 *
 * @param options the raw index-options document
 * @return the equivalent {@link IndexOptions}
 */
public static IndexOptions toIndexOptions(final Document options) {
final IndexOptions indexOptions = new IndexOptions();
applyIfTrue(options.containsKey("background"), () -> indexOptions.background(options.getBoolean("background")));
applyIfTrue(options.containsKey("bits"), () -> indexOptions.bits(options.getInteger("bits")));
applyIfTrue(options.containsKey("bucketSize"), () -> indexOptions.bucketSize(options.getDouble("bucketSize")));
// Rebuild the nested "collation" sub-document field by field.
applyIfTrue(options.containsKey("collation"), () -> {
final Document collationData = options.get("collation", Document.class);
final Collation.Builder builder = Collation.builder();
applyIfTrue(collationData.containsKey("backwards"), () -> builder.backwards(collationData.getBoolean("backwards")));
applyIfTrue(collationData.containsKey("caseLevel"), () -> builder.caseLevel(collationData.getBoolean("caseLevel")));
applyIfTrue(collationData.containsKey("alternate"),
() -> builder.collationAlternate(CollationAlternate.fromString(collationData.getString("alternate"))));
applyIfTrue(collationData.containsKey("caseFirst"),
() -> builder.collationCaseFirst(CollationCaseFirst.fromString(collationData.getString("caseFirst"))));
applyIfTrue(collationData.containsKey("maxVariable"),
() -> builder.collationMaxVariable(CollationMaxVariable.fromString(collationData.getString("maxVariable"))));
applyIfTrue(collationData.containsKey("strength"),
() -> builder.collationStrength(CollationStrength.fromInt(collationData.getInteger("strength"))));
applyIfTrue(collationData.containsKey("locale"), () -> builder.locale(collationData.getString("locale")));
applyIfTrue(collationData.containsKey("normalization"), () -> builder.normalization(collationData.getBoolean("normalization")));
applyIfTrue(collationData.containsKey("numericOrdering"),
() -> builder.numericOrdering(collationData.getBoolean("numericOrdering")));
indexOptions.collation(builder.build());
});
applyIfTrue(options.containsKey("default_language"), () -> indexOptions.defaultLanguage(options.getString("default_language")));
// NOTE(review): Document.getLong throws ClassCastException when the stored
// value is an Integer (it does not widen) — confirm expireAfterSeconds is
// always stored as a 64-bit value by the producers of this document.
applyIfTrue(options.containsKey("expireAfterSeconds"),
() -> indexOptions.expireAfter(options.getLong("expireAfterSeconds"), TimeUnit.SECONDS));
applyIfTrue(options.containsKey("language_override"), () -> indexOptions.languageOverride(options.getString("language_override")));
applyIfTrue(options.containsKey("max"), () -> indexOptions.max(options.getDouble("max")));
applyIfTrue(options.containsKey("min"), () -> indexOptions.min(options.getDouble("min")));
applyIfTrue(options.containsKey("name"), () -> indexOptions.name(options.getString("name")));
applyIfTrue(options.containsKey("partialFilterExpression"),
() -> indexOptions.partialFilterExpression(options.get("partialFilterExpression", Bson.class)));
applyIfTrue(options.containsKey("sparse"), () -> indexOptions.sparse(options.getBoolean("sparse")));
applyIfTrue(options.containsKey("sphereVersion"), () -> indexOptions.sphereVersion(options.getInteger("sphereVersion")));
applyIfTrue(options.containsKey("storageEngine"), () -> indexOptions.storageEngine(options.get("storageEngine", Bson.class)));
applyIfTrue(options.containsKey("textVersion"), () -> indexOptions.textVersion(options.getInteger("textVersion")));
applyIfTrue(options.containsKey("unique"), () -> indexOptions.unique(options.getBoolean("unique")));
applyIfTrue(options.containsKey("version"), () -> indexOptions.version(options.getInteger("version")));
applyIfTrue(options.containsKey("weights"), () -> indexOptions.weights(options.get("weights", Bson.class)));
return indexOptions;
}
@Override
public CollationStrength strength() {
// Delegates to the enclosing class's generic get(...) lookup under the
// "strength" key; the backing store is outside this view — presumably the
// option map/document this options class wraps (confirm against the class).
return get("strength");
}
@Test
public void testFindAndModifyWithOptions() {
// Fresh collection with two users whose names differ only in case.
getMapper().getCollection(FacebookUser.class).drop();
getDs().save(asList(new FacebookUser(1, "John Doe"),
new FacebookUser(2, "john doe")));
// Default modify(): case-sensitive match updates only id=2; the returned
// document reflects the post-update state (loginCount == 1).
FacebookUser result = getDs().find(FacebookUser.class)
.filter(eq("username", "john doe"))
.modify(inc("loginCount"))
.execute();
assertEquals(0, getDs().find(FacebookUser.class).filter(eq("id", 1)).iterator(new FindOptions().limit(1)).next()
.loginCount);
assertEquals(1, getDs().find(FacebookUser.class).filter(eq("id", 2)).iterator(new FindOptions().limit(1))
.next()
.loginCount);
assertEquals(1, result.loginCount);
// Case-insensitive (SECONDARY) collation with returnDocument(BEFORE):
// id=1 ("John Doe") is updated, but its pre-update state (loginCount 0)
// is what comes back.
result = getDs().find(FacebookUser.class)
.filter(eq("username", "john doe"))
.modify(inc("loginCount"))
.execute(new ModifyOptions()
.returnDocument(BEFORE)
.collation(Collation.builder()
.locale("en")
.collationStrength(CollationStrength.SECONDARY)
.build()));
assertEquals(1, getDs().find(FacebookUser.class).filter(eq("id", 1)).iterator(new FindOptions().limit(1))
.next()
.loginCount);
assertEquals(0, result.loginCount);
assertEquals(1, getDs().find(FacebookUser.class).filter(eq("id", 2)).iterator(new FindOptions().limit(1))
.next()
.loginCount);
// Upsert with returnDocument(BEFORE): no matching document existed, so null
// is returned even though a new document is inserted.
result = getDs().find(FacebookUser.class)
.filter(eq("id", 3L),
eq("username", "Jon Snow"))
.modify(inc("loginCount"))
.execute(new ModifyOptions()
.returnDocument(BEFORE)
.upsert(true));
assertNull(result);
FacebookUser user = getDs().find(FacebookUser.class).filter(eq("id", 3)).iterator(new FindOptions().limit(1))
.next();
assertEquals(1, user.loginCount);
assertEquals("Jon Snow", user.username);
// Upsert with returnDocument(AFTER): the freshly inserted document is returned.
result = getDs().find(FacebookUser.class)
.filter(eq("id", 4L),
eq("username", "Ron Swanson"))
.modify(inc("loginCount"))
.execute(new ModifyOptions()
.returnDocument(AFTER)
.upsert(true));
assertNotNull(result);
user = getDs().find(FacebookUser.class).filter(eq("id", 4)).iterator(new FindOptions().limit(1))
.next();
assertEquals(1, result.loginCount);
assertEquals("Ron Swanson", result.username);
assertEquals(1, user.loginCount);
assertEquals("Ron Swanson", user.username);
}