Code examples for the com.mongodb.client.model.Collation class

The examples below show how the com.mongodb.client.model.Collation API is used in real projects; the full source files can be viewed on GitHub.
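
Before the project-specific examples, here is a minimal sketch of the pattern most of them share: build a Collation with Collation.builder() and pass it to an operation or to its options object. This is a sketch only; the connection string, database, and collection names are placeholders, not taken from the projects below.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Collation;
import com.mongodb.client.model.CollationStrength;
import org.bson.Document;

import static com.mongodb.client.model.Filters.eq;

public class CollationQuickStart {
    public static void main(String[] args) {
        // Placeholder connection string, database, and collection names.
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> users =
                    client.getDatabase("demo").getCollection("users");

            // Strength SECONDARY compares letters case-insensitively.
            Collation caseInsensitive = Collation.builder()
                    .locale("en")
                    .collationStrength(CollationStrength.SECONDARY)
                    .build();

            // The collation applies only to this query; "John Doe" and "john doe" both match.
            Document first = users.find(eq("username", "john doe"))
                    .collation(caseInsensitive)
                    .first();
            System.out.println(first);
        }
    }
}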

Example 1  Project: quarkus  File: BasicInteractionTest.java
@Test
void testSingleDocumentDeletionWithOptions() {
    ReactiveMongoDatabase database = client.getDatabase(DATABASE);
    ReactiveMongoCollection<Document> collection = database.getCollection(randomAlphaString(8));

    List<Document> documents = new ArrayList<>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents).await().indefinitely();

    DeleteResult result = collection.deleteOne(eq("i", 10),
            new DeleteOptions().collation(
                    Collation.builder().locale("en").caseLevel(true).build()))
            .await().indefinitely();
    assertThat(result.getDeletedCount()).isEqualTo(1);
    assertThat(collection.find(eq("i", 10)).collectItems().first().await().indefinitely()).isNull();
    Long count = collection.countDocuments().await().indefinitely();
    assertThat(count).isEqualTo(99);
}
 
Example 2  Project: quarkus  File: BasicInteractionTest.java
@Test
void testMultipleDocumentDeletionWithOptions() {
    ReactiveMongoDatabase database = client.getDatabase(DATABASE);
    ReactiveMongoCollection<Document> collection = database.getCollection(randomAlphaString(8));

    List<Document> documents = new ArrayList<>();
    for (int i = 0; i < 100; i++) {
        documents.add(new Document("i", i));
    }
    collection.insertMany(documents).await().indefinitely();

    DeleteResult result = collection.deleteMany(gte("i", 90), new DeleteOptions().collation(
            Collation.builder().locale("en").caseLevel(true).build()))
            .await().indefinitely();
    assertThat(result.getDeletedCount()).isEqualTo(10);
    assertThat(collection.find(eq("i", 90)).collectItems().first().await().asOptional().indefinitely()).isEmpty();
    Long count = collection.countDocuments().await().indefinitely();
    assertThat(count).isEqualTo(90);
}
 
Example 3  Project: morphia  File: AggregationTest.java
@Test
public void testCollation() {
    getDs().save(asList(new User("john doe", new Date()), new User("John Doe", new Date())));

    Aggregation<User> pipeline = getDs()
                                     .aggregate(User.class)
                                     .match(eq("name", "john doe"));
    assertEquals(1, count(pipeline.execute(User.class)));

    assertEquals(2, count(pipeline.execute(User.class,
        new dev.morphia.aggregation.experimental.AggregationOptions()
            .collation(Collation.builder()
                                .locale("en")
                                .collationStrength(SECONDARY)
                                .build()))));
}
 
Example 4  Project: morphia  File: TestDatastore.java
@Test
public void testDeleteWithCollation() {
    getMapper().getCollection(FacebookUser.class).drop();
    getDs().save(asList(new FacebookUser(1, "John Doe"),
        new FacebookUser(2, "john doe")));

    Query<FacebookUser> query = getDs().find(FacebookUser.class)
                                       .filter(eq("username", "john doe"));
    assertEquals(1, query.delete().getDeletedCount());

    assertEquals(1, query.delete(new DeleteOptions()
                                     .collation(Collation.builder()
                                                         .locale("en")
                                                         .collationStrength(CollationStrength.SECONDARY)
                                                         .build()))
                         .getDeletedCount());
}
 
Example 5  Project: morphia  File: TestDatastore.java
@Test
public void testFindAndDeleteWithCollation() {
    getMapper().getCollection(FacebookUser.class).drop();
    getDs().save(asList(new FacebookUser(1, "John Doe"),
        new FacebookUser(2, "john doe")));

    Query<FacebookUser> query = getDs().find(FacebookUser.class)
                                       .filter(eq("username", "john doe"));
    assertNotNull(query.findAndDelete());
    assertNull(query.findAndDelete());

    FindAndDeleteOptions options = new FindAndDeleteOptions()
                                       .collation(Collation.builder()
                                                           .locale("en")
                                                           .collationStrength(CollationStrength.SECONDARY)
                                                           .build());
    assertNotNull(query.findAndDelete(options));
    assertNull(query.iterator().tryNext());
}
 
Example 6  Project: mongo-kafka  File: ConfigHelper.java
public static Optional<Collation> collationFromJson(final String collationString) {
  if (collationString.isEmpty()) {
    return Optional.empty();
  }
  Collation.Builder builder = Collation.builder();
  Document collationDoc = Document.parse(collationString);
  if (collationDoc.containsKey("locale")) {
    builder.locale(collationDoc.getString("locale"));
  }
  if (collationDoc.containsKey("caseLevel")) {
    builder.caseLevel(collationDoc.getBoolean("caseLevel"));
  }
  if (collationDoc.containsKey("caseFirst")) {
    builder.collationCaseFirst(
        CollationCaseFirst.fromString(collationDoc.getString("caseFirst")));
  }
  if (collationDoc.containsKey("strength")) {
    builder.collationStrength(CollationStrength.fromInt(collationDoc.getInteger("strength")));
  }
  if (collationDoc.containsKey("numericOrdering")) {
    builder.numericOrdering(collationDoc.getBoolean("numericOrdering"));
  }
  if (collationDoc.containsKey("alternate")) {
    builder.collationAlternate(
        CollationAlternate.fromString(collationDoc.getString("alternate")));
  }
  if (collationDoc.containsKey("maxVariable")) {
    builder.collationMaxVariable(
        CollationMaxVariable.fromString(collationDoc.getString("maxVariable")));
  }
  if (collationDoc.containsKey("normalization")) {
    builder.normalization(collationDoc.getBoolean("normalization"));
  }
  if (collationDoc.containsKey("backwards")) {
    builder.backwards(collationDoc.getBoolean("backwards"));
  }
  return Optional.of(builder.build());
}
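
A hedged usage sketch for the helper above: the JSON keys mirror the ones collationFromJson inspects, and an empty string yields Optional.empty(). It assumes the ConfigHelper class shown above is on the classpath (its package import is omitted here, since it depends on the mongo-kafka version); the surrounding class name is illustrative only.

import com.mongodb.client.model.Collation;
import java.util.Optional;

public class CollationFromJsonDemo {
    public static void main(String[] args) {
        // "strength": 2 maps to CollationStrength.SECONDARY via CollationStrength.fromInt(2).
        String json = "{\"locale\": \"en\", \"strength\": 2, \"caseLevel\": false}";

        Optional<Collation> collation = ConfigHelper.collationFromJson(json);
        collation.ifPresent(c -> System.out.println(c.asDocument().toJson()));

        // An empty configuration string means "no collation configured".
        System.out.println(ConfigHelper.collationFromJson("").isPresent()); // prints: false
    }
}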
 
Example 7  Project: morphia  File: AggregationTest.java
@Test
public void testCollation() {
    getDs().save(asList(new User("john doe", new Date()), new User("John Doe", new Date())));

    Aggregation<User> pipeline = getDs().aggregate(User.class)
                                        .match(eq("name", "john doe"));
    Assert.assertEquals(1, count(pipeline.execute(User.class)));

    Assert.assertEquals(2, count(pipeline.execute(User.class,
        new dev.morphia.aggregation.experimental.AggregationOptions()
            .collation(Collation.builder()
                                .locale("en")
                                .collationStrength(SECONDARY)
                                .build()))));
}
 
Example 8  Project: morphia  File: TestDatastore.java
@Test
public void testUpdateWithCollation() {
    getMapper().getCollection(FacebookUser.class).drop();
    getDs().save(asList(new FacebookUser(1, "John Doe"),
        new FacebookUser(2, "john doe")));

    final Update<FacebookUser> update = getDs().find(FacebookUser.class)
                                               .filter(eq("username", "john doe"))
                                               .update(inc("loginCount"));

    UpdateResult results = update.execute();

    assertEquals(1, results.getModifiedCount());
    assertEquals(0, getDs().find(FacebookUser.class).filter(eq("id", 1)).iterator(new FindOptions().limit(1)).next()
                        .loginCount);
    assertEquals(1, getDs().find(FacebookUser.class).filter(eq("id", 2)).iterator(new FindOptions().limit(1))
                           .next()
                        .loginCount);

    results = update.execute(new UpdateOptions()
                                 .multi(true)
                                 .collation(Collation.builder()
                                                     .locale("en")
                                                     .collationStrength(CollationStrength.SECONDARY)
                                                     .build()));
    assertEquals(2, results.getModifiedCount());
    assertEquals(1, getDs().find(FacebookUser.class).filter(eq("id", 1)).iterator(new FindOptions().limit(1))
                           .next()
                        .loginCount);
    assertEquals(2, getDs().find(FacebookUser.class).filter(eq("id", 2)).iterator(new FindOptions().limit(1))
                           .next()
                        .loginCount);
}
 
Example 9  Project: mongo-kafka  File: MongoSourceConfig.java
public Optional<Collation> getCollation() {
  return collationFromJson(getString(COLLATION_CONFIG));
}
 
Example 10  Project: mongo-kafka  File: MongoSourceTaskTest.java
@Test
@DisplayName("test creates the expected collection cursor")
void testCreatesExpectedCollectionCursor() {
  MongoSourceTask task = new MongoSourceTask();
  Map<String, String> cfgMap = new HashMap<>();
  cfgMap.put(CONNECTION_URI_CONFIG, "mongodb://localhost");
  cfgMap.put(DATABASE_CONFIG, TEST_DATABASE);
  cfgMap.put(COLLECTION_CONFIG, TEST_COLLECTION);
  MongoSourceConfig cfg = new MongoSourceConfig(cfgMap);

  when(mongoClient.getDatabase(TEST_DATABASE)).thenReturn(mongoDatabase);
  when(mongoDatabase.getCollection(TEST_COLLECTION)).thenReturn(mongoCollection);
  when(mongoCollection.watch()).thenReturn(changeStreamIterable);
  when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
  when(mongoIterable.iterator()).thenReturn(mongoCursor);

  task.createCursor(cfg, mongoClient);

  verify(mongoClient, times(1)).getDatabase(TEST_DATABASE);
  verify(mongoDatabase, times(1)).getCollection(TEST_COLLECTION);
  verify(mongoCollection, times(1)).watch();
  verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
  verify(mongoIterable, times(1)).iterator();

  // Pipeline
  resetMocks();
  cfgMap.put(PIPELINE_CONFIG, "[{$match: {operationType: 'insert'}}]");
  cfg = new MongoSourceConfig(cfgMap);

  when(mongoClient.getDatabase(TEST_DATABASE)).thenReturn(mongoDatabase);
  when(mongoDatabase.getCollection(TEST_COLLECTION)).thenReturn(mongoCollection);
  when(mongoCollection.watch(cfg.getPipeline().get())).thenReturn(changeStreamIterable);
  when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
  when(mongoIterable.iterator()).thenReturn(mongoCursor);

  task.createCursor(cfg, mongoClient);

  verify(mongoClient, times(1)).getDatabase(TEST_DATABASE);
  verify(mongoDatabase, times(1)).getCollection(TEST_COLLECTION);
  verify(mongoCollection, times(1)).watch(cfg.getPipeline().get());
  verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
  verify(mongoIterable, times(1)).iterator();

  // Complex
  resetMocks();
  cfgMap.put(BATCH_SIZE_CONFIG, "101");

  FullDocument fullDocument = FullDocument.UPDATE_LOOKUP;
  cfgMap.put(FULL_DOCUMENT_CONFIG, fullDocument.getValue());
  Collation collation =
      Collation.builder()
          .locale("en")
          .caseLevel(true)
          .collationCaseFirst(CollationCaseFirst.OFF)
          .collationStrength(CollationStrength.IDENTICAL)
          .collationAlternate(CollationAlternate.SHIFTED)
          .collationMaxVariable(CollationMaxVariable.SPACE)
          .numericOrdering(true)
          .normalization(true)
          .backwards(true)
          .build();
  cfgMap.put(COLLATION_CONFIG, collation.asDocument().toJson());

  cfg = new MongoSourceConfig(cfgMap);

  task.initialize(context);
  when(context.offsetStorageReader()).thenReturn(offsetStorageReader);
  when(offsetStorageReader.offset(task.createPartitionMap(cfg))).thenReturn(OFFSET);

  when(mongoClient.getDatabase(TEST_DATABASE)).thenReturn(mongoDatabase);
  when(mongoDatabase.getCollection(TEST_COLLECTION)).thenReturn(mongoCollection);
  when(mongoCollection.watch(cfg.getPipeline().get())).thenReturn(changeStreamIterable);
  when(changeStreamIterable.batchSize(101)).thenReturn(changeStreamIterable);
  when(changeStreamIterable.fullDocument(fullDocument)).thenReturn(changeStreamIterable);
  when(changeStreamIterable.collation(collation)).thenReturn(changeStreamIterable);
  when(changeStreamIterable.startAfter(RESUME_TOKEN)).thenReturn(changeStreamIterable);
  when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
  when(mongoIterable.iterator()).thenReturn(mongoCursor);

  task.createCursor(cfg, mongoClient);

  verify(mongoClient, times(1)).getDatabase(TEST_DATABASE);
  verify(mongoDatabase, times(1)).getCollection(TEST_COLLECTION);
  verify(mongoCollection, times(1)).watch(cfg.getPipeline().get());
  verify(changeStreamIterable, times(1)).batchSize(101);
  verify(changeStreamIterable, times(1)).fullDocument(fullDocument);
  verify(changeStreamIterable, times(1)).collation(collation);
  verify(changeStreamIterable, times(1)).startAfter(RESUME_TOKEN);
  verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
  verify(mongoIterable, times(1)).iterator();
}
 
Example 11  Project: mongo-kafka  File: MongoSourceTaskTest.java
@Test
@DisplayName("test creates the expected database cursor")
void testCreatesExpectedDatabaseCursor() {
  MongoSourceTask task = new MongoSourceTask();
  Map<String, String> cfgMap = new HashMap<>();
  cfgMap.put(CONNECTION_URI_CONFIG, "mongodb://localhost");
  cfgMap.put(DATABASE_CONFIG, TEST_DATABASE);
  MongoSourceConfig cfg = new MongoSourceConfig(cfgMap);

  when(mongoClient.getDatabase(TEST_DATABASE)).thenReturn(mongoDatabase);
  when(mongoDatabase.watch()).thenReturn(changeStreamIterable);
  when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
  when(mongoIterable.iterator()).thenReturn(mongoCursor);

  task.createCursor(cfg, mongoClient);

  verify(mongoClient, times(1)).getDatabase(TEST_DATABASE);
  verify(mongoDatabase, times(1)).watch();
  verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
  verify(mongoIterable, times(1)).iterator();

  // Pipeline
  resetMocks();
  cfgMap.put(PIPELINE_CONFIG, "[{$match: {operationType: 'insert'}}]");
  cfg = new MongoSourceConfig(cfgMap);

  when(mongoClient.getDatabase(TEST_DATABASE)).thenReturn(mongoDatabase);
  when(mongoDatabase.watch(cfg.getPipeline().get())).thenReturn(changeStreamIterable);
  when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
  when(mongoIterable.iterator()).thenReturn(mongoCursor);

  task.createCursor(cfg, mongoClient);

  verify(mongoClient, times(1)).getDatabase(TEST_DATABASE);
  verify(mongoDatabase, times(1)).watch(cfg.getPipeline().get());
  verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
  verify(mongoIterable, times(1)).iterator();

  // Complex
  resetMocks();
  cfgMap.put(BATCH_SIZE_CONFIG, "101");

  FullDocument fullDocument = FullDocument.UPDATE_LOOKUP;
  cfgMap.put(FULL_DOCUMENT_CONFIG, fullDocument.getValue());
  Collation collation =
      Collation.builder()
          .locale("en")
          .caseLevel(true)
          .collationCaseFirst(CollationCaseFirst.OFF)
          .collationStrength(CollationStrength.IDENTICAL)
          .collationAlternate(CollationAlternate.SHIFTED)
          .collationMaxVariable(CollationMaxVariable.SPACE)
          .numericOrdering(true)
          .normalization(true)
          .backwards(true)
          .build();
  cfgMap.put(COLLATION_CONFIG, collation.asDocument().toJson());

  cfg = new MongoSourceConfig(cfgMap);

  task.initialize(context);
  when(context.offsetStorageReader()).thenReturn(offsetStorageReader);
  when(offsetStorageReader.offset(task.createPartitionMap(cfg))).thenReturn(OFFSET);

  when(mongoClient.getDatabase(TEST_DATABASE)).thenReturn(mongoDatabase);
  when(mongoDatabase.watch(cfg.getPipeline().get())).thenReturn(changeStreamIterable);
  when(changeStreamIterable.batchSize(101)).thenReturn(changeStreamIterable);
  when(changeStreamIterable.fullDocument(fullDocument)).thenReturn(changeStreamIterable);
  when(changeStreamIterable.collation(collation)).thenReturn(changeStreamIterable);
  when(changeStreamIterable.startAfter(RESUME_TOKEN)).thenReturn(changeStreamIterable);
  when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
  when(mongoIterable.iterator()).thenReturn(mongoCursor);

  task.createCursor(cfg, mongoClient);

  verify(mongoClient, times(1)).getDatabase(TEST_DATABASE);
  verify(mongoDatabase, times(1)).watch(cfg.getPipeline().get());
  verify(changeStreamIterable, times(1)).batchSize(101);
  verify(changeStreamIterable, times(1)).fullDocument(fullDocument);
  verify(changeStreamIterable, times(1)).collation(collation);
  verify(changeStreamIterable, times(1)).startAfter(RESUME_TOKEN);
  verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
  verify(mongoIterable, times(1)).iterator();
}
 
Example 12  Project: mongo-kafka  File: MongoSourceTaskTest.java
@Test
@DisplayName("test creates the expected client cursor")
void testCreatesExpectedClientCursor() {
  MongoSourceTask task = new MongoSourceTask();
  Map<String, String> cfgMap = new HashMap<>();
  cfgMap.put(CONNECTION_URI_CONFIG, "mongodb://localhost");
  MongoSourceConfig cfg = new MongoSourceConfig(cfgMap);

  when(mongoClient.watch()).thenReturn(changeStreamIterable);
  when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
  when(mongoIterable.iterator()).thenReturn(mongoCursor);

  task.createCursor(cfg, mongoClient);

  verify(mongoClient, times(1)).watch();
  verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
  verify(mongoIterable, times(1)).iterator();

  // Pipeline
  resetMocks();
  cfgMap.put(PIPELINE_CONFIG, "[{$match: {operationType: 'insert'}}]");
  cfg = new MongoSourceConfig(cfgMap);

  when(mongoClient.watch(cfg.getPipeline().get())).thenReturn(changeStreamIterable);
  when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
  when(mongoIterable.iterator()).thenReturn(mongoCursor);

  task.createCursor(cfg, mongoClient);

  verify(mongoClient, times(1)).watch(cfg.getPipeline().get());
  verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
  verify(mongoIterable, times(1)).iterator();

  // Complex
  resetMocks();
  cfgMap.put(BATCH_SIZE_CONFIG, "101");

  FullDocument fullDocument = FullDocument.UPDATE_LOOKUP;
  cfgMap.put(FULL_DOCUMENT_CONFIG, fullDocument.getValue());
  Collation collation =
      Collation.builder()
          .locale("en")
          .caseLevel(true)
          .collationCaseFirst(CollationCaseFirst.OFF)
          .collationStrength(CollationStrength.IDENTICAL)
          .collationAlternate(CollationAlternate.SHIFTED)
          .collationMaxVariable(CollationMaxVariable.SPACE)
          .numericOrdering(true)
          .normalization(true)
          .backwards(true)
          .build();
  cfgMap.put(COLLATION_CONFIG, collation.asDocument().toJson());
  cfg = new MongoSourceConfig(cfgMap);

  task.initialize(context);
  when(context.offsetStorageReader()).thenReturn(offsetStorageReader);
  when(offsetStorageReader.offset(task.createPartitionMap(cfg))).thenReturn(OFFSET);

  when(mongoClient.watch(cfg.getPipeline().get())).thenReturn(changeStreamIterable);
  when(changeStreamIterable.batchSize(101)).thenReturn(changeStreamIterable);
  when(changeStreamIterable.fullDocument(fullDocument)).thenReturn(changeStreamIterable);
  when(changeStreamIterable.collation(collation)).thenReturn(changeStreamIterable);
  when(changeStreamIterable.startAfter(RESUME_TOKEN)).thenReturn(changeStreamIterable);
  when(changeStreamIterable.withDocumentClass(BsonDocument.class)).thenReturn(mongoIterable);
  when(mongoIterable.iterator()).thenReturn(mongoCursor);

  task.createCursor(cfg, mongoClient);

  verify(mongoClient, times(1)).watch(cfg.getPipeline().get());
  verify(changeStreamIterable, times(1)).batchSize(101);
  verify(changeStreamIterable, times(1)).fullDocument(fullDocument);
  verify(changeStreamIterable, times(1)).collation(collation);
  verify(changeStreamIterable, times(1)).startAfter(RESUME_TOKEN);
  verify(changeStreamIterable, times(1)).withDocumentClass(BsonDocument.class);
  verify(mongoIterable, times(1)).iterator();
}
 
Example 13  Project: quarkus  File: ReactivePanacheQueryImpl.java
@Override
public <T extends Entity> ReactivePanacheQuery<T> withCollation(Collation collation) {
    this.collation = collation;
    return (ReactivePanacheQuery<T>) this;
}
 
Example 14  Project: quarkus  File: PanacheQueryImpl.java
@Override
public <T extends Entity> PanacheQuery<T> withCollation(Collation collation) {
    this.collation = collation;
    return (PanacheQuery<T>) this;
}
 
Example 15  Project: jpa-unit  File: IndexOptionsUtils.java
public static IndexOptions toIndexOptions(final Document options) {
    final IndexOptions indexOptions = new IndexOptions();

    applyIfTrue(options.containsKey("background"), () -> indexOptions.background(options.getBoolean("background")));
    applyIfTrue(options.containsKey("bits"), () -> indexOptions.bits(options.getInteger("bits")));
    applyIfTrue(options.containsKey("bucketSize"), () -> indexOptions.bucketSize(options.getDouble("bucketSize")));
    applyIfTrue(options.containsKey("collation"), () -> {
        final Document collationData = options.get("collation", Document.class);
        final Collation.Builder builder = Collation.builder();
        applyIfTrue(collationData.containsKey("backwards"), () -> builder.backwards(collationData.getBoolean("backwards")));
        applyIfTrue(collationData.containsKey("caseLevel"), () -> builder.caseLevel(collationData.getBoolean("caseLevel")));
        applyIfTrue(collationData.containsKey("alternate"),
                () -> builder.collationAlternate(CollationAlternate.fromString(collationData.getString("alternate"))));
        applyIfTrue(collationData.containsKey("caseFirst"),
                () -> builder.collationCaseFirst(CollationCaseFirst.fromString(collationData.getString("caseFirst"))));
        applyIfTrue(collationData.containsKey("maxVariable"),
                () -> builder.collationMaxVariable(CollationMaxVariable.fromString(collationData.getString("maxVariable"))));
        applyIfTrue(collationData.containsKey("strength"),
                () -> builder.collationStrength(CollationStrength.fromInt(collationData.getInteger("strength"))));
        applyIfTrue(collationData.containsKey("locale"), () -> builder.locale(collationData.getString("locale")));
        applyIfTrue(collationData.containsKey("normalization"), () -> builder.normalization(collationData.getBoolean("normalization")));
        applyIfTrue(collationData.containsKey("numericOrdering"),
                () -> builder.numericOrdering(collationData.getBoolean("numericOrdering")));
        indexOptions.collation(builder.build());
    });
    applyIfTrue(options.containsKey("default_language"), () -> indexOptions.defaultLanguage(options.getString("default_language")));
    applyIfTrue(options.containsKey("expireAfterSeconds"),
            () -> indexOptions.expireAfter(options.getLong("expireAfterSeconds"), TimeUnit.SECONDS));
    applyIfTrue(options.containsKey("language_override"), () -> indexOptions.languageOverride(options.getString("language_override")));
    applyIfTrue(options.containsKey("max"), () -> indexOptions.max(options.getDouble("max")));
    applyIfTrue(options.containsKey("min"), () -> indexOptions.min(options.getDouble("min")));
    applyIfTrue(options.containsKey("name"), () -> indexOptions.name(options.getString("name")));
    applyIfTrue(options.containsKey("partialFilterExpression"),
            () -> indexOptions.partialFilterExpression(options.get("partialFilterExpression", Bson.class)));
    applyIfTrue(options.containsKey("sparse"), () -> indexOptions.sparse(options.getBoolean("sparse")));
    applyIfTrue(options.containsKey("sphereVersion"), () -> indexOptions.sphereVersion(options.getInteger("sphereVersion")));
    applyIfTrue(options.containsKey("storageEngine"), () -> indexOptions.storageEngine(options.get("storageEngine", Bson.class)));
    applyIfTrue(options.containsKey("textVersion"), () -> indexOptions.textVersion(options.getInteger("textVersion")));
    applyIfTrue(options.containsKey("unique"), () -> indexOptions.unique(options.getBoolean("unique")));
    applyIfTrue(options.containsKey("version"), () -> indexOptions.version(options.getInteger("version")));
    applyIfTrue(options.containsKey("weights"), () -> indexOptions.weights(options.get("weights", Bson.class)));

    return indexOptions;
}
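
For context, a hedged sketch of what the IndexOptions produced above are ultimately used for: creating an index with a collation through the synchronous MongoDB Java driver. Connection string, names, and the index definition are placeholders.

import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoClients;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Collation;
import com.mongodb.client.model.CollationStrength;
import com.mongodb.client.model.IndexOptions;
import com.mongodb.client.model.Indexes;
import org.bson.Document;

public class CollationIndexDemo {
    public static void main(String[] args) {
        // Placeholder connection string, database, and collection names.
        try (MongoClient client = MongoClients.create("mongodb://localhost:27017")) {
            MongoCollection<Document> users =
                    client.getDatabase("demo").getCollection("users");

            IndexOptions options = new IndexOptions()
                    .name("username_ci")
                    .collation(Collation.builder()
                            .locale("en")
                            .collationStrength(CollationStrength.SECONDARY)
                            .build());

            // Queries that specify the same collation can be served by this index.
            String indexName = users.createIndex(Indexes.ascending("username"), options);
            System.out.println("Created index: " + indexName);
        }
    }
}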
 
Example 16  Project: morphia  File: FindOptions.java
/**
 * @return the collation
 */
public Collation getCollation() {
    return this.collation;
}
 
Example 17  Project: morphia  File: AggregationOptions.java
/**
 * @return the configuration value
 */
public Collation collation() {
    return collation;
}
 
Example 18  Project: morphia  File: AggregationOptions.java
/**
 * @return the configuration value
 */
public Collation getCollation() {
    return collation;
}
 
Example 19  Project: morphia  File: TestDatastore.java
@Test
public void testFindAndModifyWithOptions() {
    getMapper().getCollection(FacebookUser.class).drop();
    getDs().save(asList(new FacebookUser(1, "John Doe"),
        new FacebookUser(2, "john doe")));

    FacebookUser result = getDs().find(FacebookUser.class)
                                 .filter(eq("username", "john doe"))
                                 .modify(inc("loginCount"))
                                 .execute();

    assertEquals(0, getDs().find(FacebookUser.class).filter(eq("id", 1)).iterator(new FindOptions().limit(1))
                           .next()
                        .loginCount);
    assertEquals(1, getDs().find(FacebookUser.class).filter(eq("id", 2)).iterator(new FindOptions().limit(1))
                           .next()
                        .loginCount);
    assertEquals(1, result.loginCount);

    result = getDs().find(FacebookUser.class)
                    .filter(eq("username", "john doe"))
                    .modify(inc("loginCount"))
                    .execute(new ModifyOptions()
                                 .returnDocument(BEFORE)
                                 .collation(Collation.builder()
                                                     .locale("en")
                                                     .collationStrength(CollationStrength.SECONDARY)
                                                     .build()));
    assertEquals(1, getDs().find(FacebookUser.class).filter(eq("id", 1)).iterator(new FindOptions().limit(1))
                           .next()
                        .loginCount);
    assertEquals(0, result.loginCount);
    assertEquals(1, getDs().find(FacebookUser.class).filter(eq("id", 2)).iterator(new FindOptions().limit(1))
                           .next()
                        .loginCount);

    result = getDs().find(FacebookUser.class)
                    .filter(eq("id", 3L),
                        eq("username", "Jon Snow"))
                    .modify(inc("loginCount"))
                    .execute(new ModifyOptions()
                                 .returnDocument(BEFORE)
                                 .upsert(true));

    assertNull(result);
    FacebookUser user = getDs().find(FacebookUser.class).filter(eq("id", 3)).iterator(new FindOptions().limit(1))
                               .next();
    assertEquals(1, user.loginCount);
    assertEquals("Jon Snow", user.username);


    result = getDs().find(FacebookUser.class)
                    .filter(eq("id", 4L),
                        eq("username", "Ron Swanson"))
                    .modify(inc("loginCount"))
                    .execute(new ModifyOptions()
                                 .returnDocument(AFTER)
                                 .upsert(true));

    assertNotNull(result);
    user = getDs().find(FacebookUser.class).filter(eq("id", 4)).iterator(new FindOptions().limit(1))
                  .next();
    assertEquals(1, result.loginCount);
    assertEquals("Ron Swanson", result.username);
    assertEquals(1, user.loginCount);
    assertEquals("Ron Swanson", user.username);
}
 
Example 20  Project: quarkus  File: PanacheQuery.java
/**
 * Define the collation used for this query.
 *
 * @param collation the collation to be used for this query.
 * @return this query, modified
 */
public <T extends Entity> PanacheQuery<T> withCollation(Collation collation);
 
Example 21  Project: quarkus  File: ReactivePanacheQuery.java
/**
 * Define the collation used for this query.
 *
 * @param collation the collation to be used for this query.
 * @return this query, modified
 */
public <T extends Entity> ReactivePanacheQuery<T> withCollation(Collation collation);
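
To show where withCollation is called from application code, a hedged sketch with a hypothetical Person entity; the entity class, field, and finder method are invented for illustration, and only withCollation itself comes from the interfaces above.

import com.mongodb.client.model.Collation;
import com.mongodb.client.model.CollationStrength;
import io.quarkus.mongodb.panache.PanacheMongoEntity;
import io.quarkus.mongodb.panache.PanacheQuery;

import java.util.List;

// Hypothetical entity, used only to illustrate the withCollation() call site.
public class Person extends PanacheMongoEntity {
    public String name;

    public static List<Person> findByNameIgnoreCase(String name) {
        PanacheQuery<Person> query = find("name", name);
        // Case-insensitive matching for this query only.
        return query.withCollation(Collation.builder()
                        .locale("en")
                        .collationStrength(CollationStrength.SECONDARY)
                        .build())
                    .list();
    }
}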
 