Source code examples for the com.mongodb.QueryBuilder class

The examples below show how the com.mongodb.QueryBuilder API is used in practice, collected from open-source projects on GitHub; each listing names the project and source file it comes from.
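Before the project examples, here is a minimal, self-contained sketch of the chaining pattern they all rely on: start a builder, attach conditions per field, and call get() to obtain the DBObject. The field names and values here are made up purely for illustration.

import com.mongodb.DBObject;
import com.mongodb.QueryBuilder;

public class QueryBuilderSketch {
    public static void main(String[] args) {
        // Builds { "status" : "ACTIVE" , "age" : { "$gte" : 18 , "$lt" : 65 } }
        DBObject query = QueryBuilder.start("status").is("ACTIVE")
                .and("age").greaterThanEquals(18).lessThan(65)
                .get();
        System.out.println(query);
    }
}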

Example 1  Project: deep-spark  File: MongoReader.java
/**
 * Create query partition.
 *
 * @param partition the partition
 * @return the dB object
 */
private DBObject createQueryPartition(MongoPartition partition) {

    QueryBuilder queryBuilderMin = QueryBuilder.start(partition.getKey());
    DBObject bsonObjectMin = queryBuilderMin.greaterThanEquals(partition.splitWrapper().getStartToken()).get();

    QueryBuilder queryBuilderMax = QueryBuilder.start(partition.getKey());
    DBObject bsonObjectMax = queryBuilderMax.lessThan(partition.splitWrapper().getEndToken()).get();

    QueryBuilder queryBuilder = QueryBuilder.start();
    if (partition.splitWrapper().getStartToken() != null) {
        queryBuilder.and(bsonObjectMin);
    }

    if (partition.splitWrapper().getEndToken() != null) {
        queryBuilder.and(bsonObjectMax);
    }

    LOG.debug("mongodb query "+queryBuilder.get());

    return queryBuilder.get();
}
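As a usage note: when both tokens are set, the document this method returns takes the shape { "$and" : [ { key : { "$gte" : startToken } } , { key : { "$lt" : endToken } } ] }, where key is partition.getKey() and the token values are placeholders for the partition boundaries.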
 
Example 2  Project: scava  File: AbstractHistoricalMetricProvider.java
public List<Pongo> getHistoricalMeasurements(MetricProviderContext context, Project project, Date start, Date end) {
	
	DB db = context.getProjectDB(project);
	DBCollection collection = db.getCollection(this.getCollectionName());
	
	QueryBuilder builder = QueryBuilder.start();
	if (start != null) {
		builder.and("__datetime").greaterThanEquals(start.toJavaDate());
	}
	if (end != null) {
		builder.and("__datetime").lessThanEquals(end.toJavaDate());
	}
	 
	BasicDBObject query = (BasicDBObject) builder.get(); 

	Iterator<DBObject> it = collection.find(query).iterator();
	
	List<Pongo> pongoList = new ArrayList<Pongo>();
	
	while (it.hasNext()) {
		DBObject dbObject = it.next();
		pongoList.add(PongoFactory.getInstance().createPongo(dbObject));
	}
	
	return pongoList;
	
}
 
Example 3  Project: EDDI  File: ResourceFilter.java
private Document createQuery(QueryFilters[] allQueryFilters) {
    QueryBuilder retQuery = new QueryBuilder();

    for (QueryFilters queryFilters : allQueryFilters) {
        List<DBObject> dbObjects = new LinkedList<>();
        for (QueryFilter queryFilter : queryFilters.getQueryFilters()) {
            if (queryFilter.getFilter() instanceof String) {
                Pattern resourcePattern = getPatternForRegex((String) queryFilter.getFilter());
                dbObjects.add(new QueryBuilder().put(queryFilter.getField()).regex(resourcePattern).get());
            } else {
                dbObjects.add(new QueryBuilder().put(queryFilter.getField()).is(queryFilter.getFilter()).get());
            }
        }

        DBObject[] dbObjectArray = dbObjects.toArray(new DBObject[dbObjects.size()]);

        DBObject filterQuery;
        if (dbObjectArray.length > 0) {
            if (queryFilters.getConnectingType() == QueryFilters.ConnectingType.AND) {
                filterQuery = new QueryBuilder().and(dbObjectArray).get();
            } else {
                filterQuery = new QueryBuilder().or(dbObjectArray).get();
            }

            retQuery.and(filterQuery);
        }

    }

    return new Document(retQuery.get().toMap());
}
 
Example 4  Project: deep-spark  File: MongoDeepJobConfig.java
/**
 * Filter query.
 *
 * @param filters the filters
 * @return the mongo deep job config
 */
public MongoDeepJobConfig<T> filterQuery(Filter[] filters) {

    if (filters.length > 0) {
        List<BasicDBObject> list = new ArrayList<>();

        QueryBuilder queryBuilder = QueryBuilder.start();
        for (int i = 0; i < filters.length; i++) {
            BasicDBObject bsonObject = new BasicDBObject();

            Filter filter = filters[i];
            if (filter.getFilterType().equals(FilterType.EQ)) {
                bsonObject.put(filter.getField(), filter.getValue());
            } else {
                bsonObject.put(filter.getField(),
                        new BasicDBObject("$".concat(filter.getFilterType().getFilterTypeId().toLowerCase()),
                                filter.getValue()));
            }

            list.add(bsonObject);
        }
        queryBuilder.and(list.toArray(new BasicDBObject[list.size()]));

        filterQuery(queryBuilder);
    }
    return this;

}
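For comparison, the per-field condition that example 4 assembles by concatenating "$" with the filter type id can also be produced with QueryBuilder's own comparison methods. This is only a sketch; "name" and "age" are illustrative fields, not part of the project above.

DBObject equalsFilter = QueryBuilder.start("name").is("example").get();
DBObject rangeFilter  = QueryBuilder.start("age").greaterThanEquals(18).get();
// combined is { "$and" : [ { "name" : "example" } , { "age" : { "$gte" : 18 } } ] }
DBObject combined     = QueryBuilder.start().and(equalsFilter, rangeFilter).get();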
 
Example 5  Project: scava  File: RawMetricResource.java
public Representation doRepresent() {
	
	/**
	 * Fetch data metrics for both HistoricalMetricProvider & TransientMetricProvider
	 */
	String projectId = (String) getRequest().getAttributes().get("projectid");
	String metricId = (String) getRequest().getAttributes().get("metricid");
	
	String start = getQueryValue("startDate");
	String end = getQueryValue("endDate");
	
	QueryBuilder builder = QueryBuilder.start();
	try {
		if (start != null && start != "") {
			builder.and("__datetime").greaterThanEquals(new Date(start).toJavaDate());
		}
		if (end != null && end != "") {
			builder.and("__datetime").lessThanEquals(new Date(end).toJavaDate());
		}
	} catch (ParseException e) {
		e.printStackTrace(); // report the parse failure instead of discarding it
	}
	
	BasicDBObject query = (BasicDBObject) builder.get(); 
	
	ArrayNode results = mapper.createArrayNode();
			
	if (projectId != null && metricId != null) {
		this.db = mongo.getDB(ANALYSIS_SCHEDULING_DATABASE);
		ProjectAnalysisResportory repository = new ProjectAnalysisResportory(this.db);
		Iterable<MetricExecution> listMetricExecutions = repository.getMetricExecutions().findByProjectId(projectId);
		
		List<String> metricExecutions = new ArrayList<>();
		for (MetricExecution metricExecution : listMetricExecutions) {
			metricExecutions.add(metricExecution.getMetricProviderId());
		}

		if (metricExecutions.contains(metricId)) {
			List<IMetricProvider> platformProvider = this.platform.getMetricProviderManager().getMetricProviders();
			for (IMetricProvider iMetricProvider : platformProvider) {
				if (iMetricProvider.getIdentifier().equals(metricId)) {
					Project project = platform.getProjectRepositoryManager().getProjectRepository().getProjects().findOneByShortName(projectId);
					if(iMetricProvider instanceof IHistoricalMetricProvider) {
						results.addAll(getHistoricDocuments(platform.getMetricsRepository(project).getDb().getCollection(((IHistoricalMetricProvider) iMetricProvider).getCollectionName()), query));
					} else if (iMetricProvider instanceof ITransientMetricProvider) {
						results.addAll(getTransientDocuments(((ITransientMetricProvider) iMetricProvider).adapt(platform.getMetricsRepository(project).getDb()).getPongoCollections()));
					}
					break;
				}
			}

		}
	
	}
	
	return Util.createJsonRepresentation(results);
}
 
Example 6  Project: scava  File: MetricVisualisationResource.java
public Representation doRepresent() {
		String projectName = (String) getRequest().getAttributes().get("projectid");
		String metricId = (String) getRequest().getAttributes().get("metricid");
		
		String agg = getQueryValue("agg");
		String start = getQueryValue("startDate");
		String end = getQueryValue("endDate");
		
		QueryBuilder builder = QueryBuilder.start();
		if (agg != null && agg != "") {
//			builder.... // TODO
		}
		try {
			if (start != null && start != "") {
				builder.and("__datetime").greaterThanEquals(new Date(start).toJavaDate());
			}
			if (end != null && end != "") {
				builder.and("__datetime").lessThanEquals(new Date(end).toJavaDate());
			}
		} catch (ParseException e) {
			return Util.generateErrorMessageRepresentation(generateRequestJson(projectName, metricId), "Invalid date. Format must be YYYYMMDD.");
		}
		
		BasicDBObject query = (BasicDBObject) builder.get(); 
		
		ProjectRepository projectRepo = platform.getProjectRepositoryManager().getProjectRepository();
		
		Project project = projectRepo.getProjects().findOneByShortName(projectName);
		if (project == null) {
			getResponse().setStatus(Status.CLIENT_ERROR_BAD_REQUEST);
			return Util.generateErrorMessageRepresentation(generateRequestJson(projectName, metricId), "No project was found with the requested name.");
		}
		
		MetricVisualisationExtensionPointManager manager = MetricVisualisationExtensionPointManager.getInstance();
		manager.getRegisteredVisualisations();

		MetricVisualisation vis = manager.findVisualisationById(metricId);
		
		if (vis == null) {
			return Util.generateErrorMessageRepresentation(generateRequestJson(projectName, metricId), "No visualiser found with specified ID.");
		}
		
		DB db = platform.getMetricsRepository(project).getDb();
		JsonNode visualisation = vis.visualise(db, query);
		
		StringRepresentation resp = new StringRepresentation(visualisation.toString());
		resp.setMediaType(MediaType.APPLICATION_JSON);
		return resp;
	}
 
Example 7  Project: deep-spark  File: MongoNativeExtractor.java
/**
 * Calculate splits.
 *
 * @param collection the collection
 * @return the deep partition [ ]
 */
private DeepPartition[] calculateSplits(DBCollection collection) {

    BasicDBList splitData = getSplitData(collection);
    List<ServerAddress> serverAddressList = collection.getDB().getMongo().getServerAddressList();

    if (splitData == null) {
        Pair<BasicDBList, List<ServerAddress>> pair = getSplitDataCollectionShardEnviroment(getShards(collection),
                collection.getDB().getName(),
                collection.getName());
        splitData = pair.left;
        serverAddressList = pair.right;
    }

    Object lastKey = null; // Lower boundary of the first min split

    List<String> stringHosts = new ArrayList<>();

    for (ServerAddress serverAddress : serverAddressList) {
        stringHosts.add(serverAddress.toString());
    }
    int i = 0;

    MongoPartition[] partitions = new MongoPartition[splitData.size() + 1];

    for (Object aSplitData : splitData) {

        BasicDBObject currentKey = (BasicDBObject) aSplitData;

        Object currentO = currentKey.get(MONGO_DEFAULT_ID);

        partitions[i] = new MongoPartition(mongoDeepJobConfig.getRddId(), i, new DeepTokenRange(lastKey,
                currentO, stringHosts), MONGO_DEFAULT_ID);

        lastKey = currentO;
        i++;
    }
    QueryBuilder queryBuilder = QueryBuilder.start(MONGO_DEFAULT_ID);
    queryBuilder.greaterThanEquals(lastKey);
    partitions[i] = new MongoPartition(0, i, new DeepTokenRange(lastKey, null, stringHosts), MONGO_DEFAULT_ID);
    return partitions;
}
 
Example 8  Project: deep-spark  File: MongoReader.java
/**
 * Generate filter query.
 *
 * @param partition the partition
 * @return the dB object
 */
private DBObject generateFilterQuery(MongoPartition partition) {

    if (mongoDeepJobConfig.getQuery() != null) {
        QueryBuilder queryBuilder = QueryBuilder.start();

        queryBuilder.and(createQueryPartition(partition), mongoDeepJobConfig.getQuery());

        LOG.debug("mongodb query "+queryBuilder.get());

        return queryBuilder.get();
    }

    return createQueryPartition(partition);

}
 
Example 9  Project: deep-spark  File: MongoDeepJobConfig.java
/**
 * {@inheritDoc}
 */
@Override
public MongoDeepJobConfig<T> filterQuery(QueryBuilder query) {
    this.query = query.get();
    return this;
}
 
Example 10  Project: deep-spark  File: IMongoDeepJobConfig.java
/**
 * Filter query
 *
 * @param query
 * @return this object.
 */
S filterQuery(QueryBuilder query);
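A hypothetical call site for this interface method, where config stands for any implementation such as the MongoDeepJobConfig shown in example 9; the field names and value are illustrative only.

// Hypothetical usage sketch
QueryBuilder query = QueryBuilder.start("type").is("event")
        .and("timestamp").greaterThanEquals(1500000000L);
config.filterQuery(query); // the implementation in example 9 stores query.get() as the job's filter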
 