The following are example usages of org.apache.commons.lang3.time.StopWatch collected from open-source projects; you can also follow the links to view the full source on GitHub.
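All of the snippets below follow the same basic pattern, so here is a minimal, self-contained sketch of the StopWatch calls they rely on. The class name StopWatchSketch and the Thread.sleep placeholders are illustrative only; the StopWatch methods shown (createStarted, getTime(TimeUnit)) are from commons-lang3 3.5+.

import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.time.StopWatch;

public class StopWatchSketch {
    public static void main(String[] args) throws InterruptedException {
        // Construct and start explicitly, stop, then read the elapsed time.
        StopWatch manual = new StopWatch();
        manual.start();
        Thread.sleep(50); // stand-in for the work being timed
        manual.stop();
        System.out.println("elapsed ms: " + manual.getTime());

        // Or use the factory method that returns an already-running watch.
        StopWatch auto = StopWatch.createStarted();
        Thread.sleep(50);
        auto.stop();
        System.out.println("elapsed secs: " + auto.getTime(TimeUnit.SECONDS));
        System.out.println(auto); // toString() formats the elapsed time as HH:mm:ss.SSS

        // reset() returns the watch to its unstarted state so it can be reused.
        auto.reset();
        auto.start();
        Thread.sleep(50);
        auto.stop();
        System.out.println("elapsed ms after reset: " + auto.getTime());
    }
}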
/**
* {@inheritDoc}
*/
@Override
public void warmUpCache(Context context) {
for (Map.Entry<String, Integer> entry : getDescriptorPreloadFolders().entrySet()) {
String treeRoot = entry.getKey();
int depth = entry.getValue();
StopWatch stopWatch = new StopWatch();
logger.info("Starting preload of tree [{}] with depth {}", treeRoot, depth);
stopWatch.start();
try {
contentStoreService.getTree(context, treeRoot, depth);
} catch (Exception e) {
logger.error("Error while preloading tree at [{}]", treeRoot, e);
}
stopWatch.stop();
logger.info("Preload of tree [{}] with depth {} completed in {} secs", treeRoot, depth,
stopWatch.getTime(TimeUnit.SECONDS));
}
}
public ChannelResponse sendEvent(String topic, FileEvent fileEvent) throws BrokerException {
if (this.subTopics.contains(topic) || this.subVerifyTopics.containsKey(topic)) {
log.error("this is already receiver side for topic: {}", topic);
throw new BrokerException(ErrorCode.FILE_SENDER_RECEIVER_CONFLICT);
}
byte[] json = JsonHelper.object2JsonBytes(fileEvent);
ChannelRequest channelRequest = new ChannelRequest();
channelRequest.setToTopic(topic);
channelRequest.setMessageID(this.service.newSeq());
channelRequest.setTimeout(this.service.getConnectSeconds() * 1000);
channelRequest.setContent(json);
log.info("send channel request, topic: {} {} id: {}", channelRequest.getToTopic(), fileEvent.getEventType(), channelRequest.getMessageID());
ChannelResponse rsp;
StopWatch sw = StopWatch.createStarted();
if (this.senderVerifyTopics.containsKey(topic)) {
log.info("over verified AMOP channel");
rsp = this.service.sendChannelMessageForVerifyTopic(channelRequest);
} else {
rsp = this.service.sendChannelMessage2(channelRequest);
}
sw.stop();
log.info("receive channel response, id: {} result: {}-{} cost: {}", rsp.getMessageID(), rsp.getErrorCode(), rsp.getErrorMessage(), sw.getTime());
return rsp;
}
@Before
public void before() throws Exception {
StopWatch stopWatch = new StopWatch();
stopWatch.start();
this.sqlgGraph = SqlgGraph.open(configuration);
SqlgUtil.dropDb(this.sqlgGraph);
this.sqlgGraph.tx().commit();
this.sqlgGraph.close();
this.sqlgGraph = SqlgGraph.open(configuration);
grantReadOnlyUserPrivileges();
assertNotNull(this.sqlgGraph);
assertNotNull(this.sqlgGraph.getBuildVersion());
this.gt = this.sqlgGraph.traversal();
if (configuration.getBoolean("distributed", false)) {
this.sqlgGraph1 = SqlgGraph.open(configuration);
assertNotNull(this.sqlgGraph1);
assertEquals(this.sqlgGraph.getBuildVersion(), this.sqlgGraph1.getBuildVersion());
}
stopWatch.stop();
logger.info("Startup time for test = " + stopWatch.toString());
}
public void execute(){
// atomically claim the running flag; skip this run if another one is still in progress
if (!isRunning.compareAndSet(false, true)) {
return;
}
try {
StopWatch stopWatch = new StopWatch();
stopWatch.start();
LOG.info("job start");
executeInternal();
LOG.info("job finished, takes {} ms", stopWatch.getTime());
} catch (Exception e) {
LOG.error("run error", e);
} finally {
isRunning.set(false);
}
}
private void updatePredictedCounts(){
StopWatch stopWatch = new StopWatch();
if (logger.isDebugEnabled()){
stopWatch.start();
}
IntStream intStream;
if (isParallel){
intStream = IntStream.range(0,numParameters).parallel();
} else {
intStream = IntStream.range(0,numParameters);
}
intStream.forEach(i -> this.predictedCounts.set(i, calPredictedCount(i)));
if (logger.isDebugEnabled()){
logger.debug("time spent on updatePredictedCounts = "+stopWatch);
}
}
public void xxxxLookupErrataByAdvisoryType() throws IOException {
String bugfix = "Bug Fix Advisory";
String pea = "Product Enhancement Advisory";
String security = "Security Advisory";
StopWatch st = new StopWatch();
st.start();
List erratas = ErrataManager.lookupErrataByType(bugfix);
outputErrataList(erratas);
System.out.println("Got bugfixes: " + erratas.size() + " time: " + st);
assertTrue(erratas.size() > 0);
erratas = ErrataManager.lookupErrataByType(pea);
outputErrataList(erratas);
System.out.println("Got pea enhancments: " + erratas.size() + " time: " + st);
assertTrue(erratas.size() > 0);
erratas = ErrataManager.lookupErrataByType(security);
outputErrataList(erratas);
assertTrue(erratas.size() > 0);
System.out.println("Got security advisories: " + erratas.size() + " time: " + st);
st.stop();
System.out.println("TIME: " + st.getTime());
}
/**
* Promote all build dependencies NOT ALREADY CAPTURED to the hosted repository holding store for the shared imports
* and return dependency artifact metadata.
*
* @param report The tracking report that contains info about artifacts downloaded by the build
* @param promote flag indicating whether the collected dependencies should be promoted
* @return List of dependency artifact metadata
* @throws RepositoryManagerException In case of a client API transport error or an error during promotion of
* artifacts
* @throws PromotionValidationException when the promotion process results in an error due to validation failure
*/
private List<Artifact> processDownloads(final TrackedContentDTO report, final boolean promote)
throws RepositoryManagerException, PromotionValidationException {
List<Artifact> deps;
logger.info("BEGIN: Process artifacts downloaded by build");
userLog.info("Processing dependencies");
StopWatch stopWatch = StopWatch.createStarted();
Set<TrackedContentEntryDTO> downloads = report.getDownloads();
if (CollectionUtils.isEmpty(downloads)) {
deps = Collections.emptyList();
} else {
deps = collectDownloadedArtifacts(report);
if (promote) {
Map<StoreKey, Map<StoreKey, Set<String>>> depMap = collectDownloadsPromotionMap(downloads);
promoteDownloads(depMap);
}
}
logger.info("END: Process artifacts downloaded by build, took {} seconds", stopWatch.getTime(TimeUnit.SECONDS));
return deps;
}
@Override
protected void updateBinaryClassifier(int component, int label, MultiLabelClfDataSet activeDataset, double[] activeGammas) {
StopWatch stopWatch = new StopWatch();
stopWatch.start();
if (cbm.binaryClassifiers[component][label] == null || cbm.binaryClassifiers[component][label] instanceof PriorProbClassifier){
cbm.binaryClassifiers[component][label] = new LogisticRegression(2, activeDataset.getNumFeatures());
}
int[] binaryLabels = DataSetUtil.toBinaryLabels(activeDataset.getMultiLabels(), label);
double[][] targetsDistribution = DataSetUtil.labelsToDistributions(binaryLabels, 2);
// no parallelism
ElasticNetLogisticTrainer elasticNetLogisticTrainer = new ElasticNetLogisticTrainer.Builder((LogisticRegression)
cbm.binaryClassifiers[component][label], activeDataset, 2, targetsDistribution, activeGammas)
.setRegularization(regularizationBinary)
.setL1Ratio(l1RatioBinary)
.setLineSearch(lineSearch).build();
elasticNetLogisticTrainer.setActiveSet(activeSet);
elasticNetLogisticTrainer.getTerminator().setMaxIteration(this.binaryUpdatesPerIter);
elasticNetLogisticTrainer.optimize();
if (logger.isDebugEnabled()){
logger.debug("time spent on updating component "+component+" label "+label+" = "+stopWatch);
}
}
private static void timerTests(final CellBlockBuilder builder, final int count, final int size,
final Codec codec, final CompressionCodec compressor) throws IOException {
final int cycles = 1000;
StopWatch timer = new StopWatch();
timer.start();
for (int i = 0; i < cycles; i++) {
timerTest(builder, timer, count, size, codec, compressor, false);
}
timer.stop();
LOG.info("Codec=" + codec + ", compression=" + compressor + ", sized=" + false + ", count="
+ count + ", size=" + size + ", + took=" + timer.getTime() + "ms");
timer.reset();
timer.start();
for (int i = 0; i < cycles; i++) {
timerTest(builder, timer, count, size, codec, compressor, true);
}
timer.stop();
LOG.info("Codec=" + codec + ", compression=" + compressor + ", sized=" + true + ", count="
+ count + ", size=" + size + ", + took=" + timer.getTime() + "ms");
}
/**
* second stage
* @throws Exception
*/
static void spam_resume_train_2() throws Exception{
System.out.println("loading ensemble");
LKBoost lkBoost = LKBoost.deserialize(new File(TMP, "/LKTreeBoostTest/ensemble.ser"));
ClfDataSet dataSet = TRECFormat.loadClfDataSet(new File(DATASETS,"spam/trec_data/train.trec"),
DataSetType.CLF_DENSE,true);
LKBoostOptimizer trainer = new LKBoostOptimizer(lkBoost,dataSet);
trainer.initialize();
StopWatch stopWatch = new StopWatch();
stopWatch.start();
for (int round =50;round<100;round++){
System.out.println("round="+round);
trainer.iterate();
}
stopWatch.stop();
System.out.println(stopWatch);
double accuracy = Accuracy.accuracy(lkBoost,dataSet);
System.out.println(accuracy);
}
@Test
public void testVerticesBatchOn() throws InterruptedException {
StopWatch stopWatch = new StopWatch();
stopWatch.start();
this.sqlgGraph.tx().normalBatchModeOn();
for (int i = 0; i < 10000; i++) {
Vertex v1 = this.sqlgGraph.addVertex(T.label, "MO1", "name", "marko" + i);
Vertex v2 = this.sqlgGraph.addVertex(T.label, "Person", "name", "marko" + i);
v1.addEdge("Friend", v2, "name", "xxx");
}
this.sqlgGraph.tx().commit();
stopWatch.stop();
System.out.println(stopWatch.toString());
testVerticesBatchOn_assert(this.sqlgGraph);
if (this.sqlgGraph1 != null) {
Thread.sleep(1000);
testVerticesBatchOn_assert(this.sqlgGraph1);
}
}
@Override
public GetResult execute(RealTimeGet search, DocumentFactory assets) {
try {
final StopWatch elapsedTime = StopWatch.createStarted();
final MultiGetResponse response = elasticSearchClient.realTimeGet(search.getValues());
elapsedTime.stop();
if(response!=null){
return ResultUtils.buildRealTimeGetResult(response, search, assets, elapsedTime.getTime()).setElapsedTime(elapsedTime.getTime());
}else {
log.error("Null result from ElasticClient");
throw new SearchServerException("Null result from ElasticClient");
}
} catch (ElasticsearchException | IOException e) {
log.error("Cannot execute realTime get query");
throw new SearchServerException("Cannot execute realTime get query", e);
}
}
private IndexResult indexSingleDocument(Document doc, int withinMs) {
log.warn("Parameter 'within' not in use in elastic search backend");
final StopWatch elapsedTime = StopWatch.createStarted();
final Map<String,Object> document = DocumentUtil.createInputDocument(doc);
try {
elasticClientLogger.debug(">>> add({})", doc.getId());
final BulkResponse response = this.elasticSearchClient.add(document);
elapsedTime.stop();
return new IndexResult(response.getTook().getMillis()).setElapsedTime(elapsedTime.getTime());
} catch (ElasticsearchException | IOException e) {
log.error("Cannot index document {}", document.get(FieldUtil.ID) , e);
throw new SearchServerException("Cannot index document", e);
}
}
@Override
public void deleteBuildGroup() throws RepositoryManagerException {
logger.info("BEGIN: Removing build aggregation group: {}", buildContentId);
userLog.info("Removing build aggregation group");
StopWatch stopWatch = StopWatch.createStarted();
try {
StoreKey key = new StoreKey(packageType, StoreType.group, buildContentId);
serviceAccountIndy.stores().delete(key, "[Post-Build] Removing build aggregation group: " + buildContentId);
} catch (IndyClientException e) {
throw new RepositoryManagerException(
"Failed to retrieve Indy stores module. Reason: %s",
e,
e.getMessage());
}
logger.info(
"END: Removing build aggregation group: {}, took: {} seconds",
buildContentId,
stopWatch.getTime(TimeUnit.SECONDS));
stopWatch.reset();
}
private ArrayList<SqlgVertex> createMilCarVertex() {
StopWatch stopWatch = new StopWatch();
stopWatch.start();
ArrayList<SqlgVertex> result = new ArrayList<>();
this.sqlgGraph.tx().normalBatchModeOn();
for (int i = 1; i < NUMBER_OF_VERTICES + 1; i++) {
Map<String, Object> keyValue = new LinkedHashMap<>();
for (int j = 0; j < 100; j++) {
keyValue.put("name" + j, "aaaaaaaaaa" + i);
}
SqlgVertex car = (SqlgVertex) this.sqlgGraph.addVertex("Car", keyValue);
result.add(car);
if (i % (NUMBER_OF_VERTICES / 10) == 0) {
this.sqlgGraph.tx().commit();
this.sqlgGraph.tx().normalBatchModeOn();
}
}
this.sqlgGraph.tx().commit();
stopWatch.stop();
System.out.println("createMilCarVertex took " + stopWatch.toString());
return result;
}
private static void reportAccPrediction(Config config, CBM cbm, MultiLabelClfDataSet dataSet, String name) throws Exception{
System.out.println("============================================================");
System.out.println("Making predictions on "+name +" set with the instance set accuracy optimal predictor");
String output = config.getString("output.dir");
AccPredictor accPredictor = new AccPredictor(cbm);
accPredictor.setComponentContributionThreshold(config.getDouble("predict.piThreshold"));
StopWatch stopWatch = new StopWatch();
stopWatch.start();
MultiLabel[] predictions = accPredictor.predict(dataSet);
System.out.println("time spent on prediction = "+stopWatch);
MLMeasures mlMeasures = new MLMeasures(dataSet.getNumClasses(),dataSet.getMultiLabels(),predictions);
System.out.println(name+" performance with the instance set accuracy optimal predictor");
System.out.println(mlMeasures);
File performanceFile = Paths.get(output,name+"_predictions", "instance_accuracy_optimal","performance.txt").toFile();
FileUtils.writeStringToFile(performanceFile, mlMeasures.toString());
System.out.println(name+" performance is saved to "+performanceFile.toString());
// Here we do not use approximation
double[] setProbs = IntStream.range(0, predictions.length).parallel().
mapToDouble(i->cbm.predictAssignmentProb(dataSet.getRow(i),predictions[i])).toArray();
File predictionFile = Paths.get(output,name+"_predictions", "instance_accuracy_optimal","predictions.txt").toFile();
try (BufferedWriter br = new BufferedWriter(new FileWriter(predictionFile))){
for (int i=0;i<dataSet.getNumDataPoints();i++){
br.write(predictions[i].toString());
br.write(":");
br.write(""+setProbs[i]);
br.newLine();
}
}
System.out.println("predicted sets and their probabilities are saved to "+predictionFile.getAbsolutePath());
boolean individualPerformance = true;
if (individualPerformance){
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.writeValue(Paths.get(output,name+"_predictions", "instance_accuracy_optimal","individual_performance.json").toFile(),mlMeasures.getMacroAverage());
}
System.out.println("============================================================");
}
public List<NsRef> retrieveCustomizationIds(final BasicRecordType type) throws NetSuiteException {
GetCustomizationIdResult result = clientService.execute(new NetSuiteClientService.PortOperation<GetCustomizationIdResult, NetSuitePortType>() {
@Override public GetCustomizationIdResult execute(NetSuitePortType port) throws Exception {
logger.debug("Retrieving customization IDs: {}", type.getType());
StopWatch stopWatch = new StopWatch();
try {
stopWatch.start();
final GetCustomizationIdRequest request = new GetCustomizationIdRequest();
CustomizationType customizationType = new CustomizationType();
customizationType.setGetCustomizationType(GetCustomizationType.fromValue(type.getType()));
request.setCustomizationType(customizationType);
return port.getCustomizationId(request).getGetCustomizationIdResult();
} finally {
stopWatch.stop();
logger.debug("Retrieved customization IDs: {}, {}", type.getType(), stopWatch);
}
}
});
if (result.getStatus().getIsSuccess()) {
List<NsRef> nsRefs;
if (result.getTotalRecords() > 0) {
final List<CustomizationRef> refs = result.getCustomizationRefList().getCustomizationRef();
nsRefs = new ArrayList<>(refs.size());
for (final CustomizationRef ref : refs) {
NsRef nsRef = new NsRef();
nsRef.setRefType(RefType.CUSTOMIZATION_REF);
nsRef.setScriptId(ref.getScriptId());
nsRef.setInternalId(ref.getInternalId());
nsRef.setType(ref.getType().value());
nsRef.setName(ref.getName());
nsRefs.add(nsRef);
}
} else {
nsRefs = Collections.emptyList();
}
return nsRefs;
} else {
throw new NetSuiteException("Retrieving of customizations was not successful: " + type);
}
}
@Test
public void testBulkWithinMultipleHasContainers() throws InterruptedException {
StopWatch stopWatch = new StopWatch();
stopWatch.start();
Vertex god = this.sqlgGraph.addVertex(T.label, "God");
Vertex person1 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 1, "name", "pete");
god.addEdge("creator", person1);
Vertex person2 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 2, "name", "pete");
god.addEdge("creator", person2);
Vertex person3 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 3, "name", "john");
god.addEdge("creator", person3);
Vertex person4 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 4, "name", "pete");
god.addEdge("creator", person4);
Vertex person5 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 5, "name", "pete");
god.addEdge("creator", person5);
Vertex person6 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 6, "name", "pete");
god.addEdge("creator", person6);
Vertex person7 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 7, "name", "pete");
god.addEdge("creator", person7);
Vertex person8 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 8, "name", "pete");
god.addEdge("creator", person8);
Vertex person9 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 9, "name", "pete");
god.addEdge("creator", person9);
Vertex person10 = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", 10, "name", "pete");
god.addEdge("creator", person10);
this.sqlgGraph.tx().commit();
stopWatch.stop();
System.out.println(stopWatch.toString());
stopWatch.reset();
stopWatch.start();
testBulkWithinMultipleHasContainers_assert(this.sqlgGraph);
if (this.sqlgGraph1 != null) {
Thread.sleep(SLEEP_TIME);
testBulkWithinMultipleHasContainers_assert(this.sqlgGraph1);
}
stopWatch.stop();
System.out.println(stopWatch.toString());
}
public void generatePendingCodes() {
StopWatch stopWatch = new StopWatch();
log.trace("start pending codes generation");
stopWatch.start();
specialPriceRepository.findWaitingElements().forEach(this::generateCode);
stopWatch.stop();
log.trace("end. Took {} ms", stopWatch.getTime());
}
@Test
@Ignore("This test requires a missing resource.")
public void testLoadGoTaxon() throws Exception {
ParserWrapper pw = new ParserWrapper();
pw.addIRIMapper(new CatalogXmlIRIMapper(catalogXml));
OWLGraphWrapper g = pw.parseToOWLGraph(goFile);
GafSolrDocumentLoaderIntegrationRunner.printMemoryStats();
GafSolrDocumentLoaderIntegrationRunner.gc();
GafSolrDocumentLoaderIntegrationRunner.printMemoryStats();
ConfigManager configManager = new ConfigManager();
configManager.add("src/test/resources/test-ont-config.yaml");
StopWatch watch = new StopWatch();
watch.start();
FlexCollection c = new FlexCollection(configManager, g);
GafSolrDocumentLoaderIntegrationRunner.printMemoryStats();
GafSolrDocumentLoaderIntegrationRunner.gc();
GafSolrDocumentLoaderIntegrationRunner.printMemoryStats();
FlexSolrDocumentLoader loader = new FlexSolrDocumentLoader((SolrServer)null, c) {
@Override
protected void addToServer(Collection<SolrInputDocument> docs)
throws SolrServerException, IOException {
solrCounter += docs.size();
}
};
loader.load();
watch.stop();
GafSolrDocumentLoaderIntegrationRunner.printMemoryStats();
System.out.println("Loaded "+solrCounter+" Solr docs in "+watch);
assertTrue(solrCounter > 0);
}
private StopWatch getStopWatch(final T key) {
if (stopWatches.containsKey(key)) {
return stopWatches.get(key);
} else {
final StopWatch sw = new StopWatch();
stopWatches.put(key, sw);
return sw;
}
}
@Test
public void testRetrieveCustomRecordCustomFields() throws Exception {
StopWatch stopWatch = new StopWatch();
stopWatch.start();
RecordTypeInfo recordType = connection.getMetaDataSource()
.getRecordType("customrecord_campaign_revenue");
TypeDesc typeDesc = connection.getMetaDataSource().getTypeInfo(recordType.getName());
logger.debug("Record type desc: {}", typeDesc.getTypeName());
stopWatch.stop();
}
private StyleSpans<Collection<String>> computeHighlighting(String text) {
StopWatch s = new StopWatch();
s.start();
try {
if (getCurrentContenttypePlugin() != null && !shouldSkipHighlighting(text))
return getCurrentContenttypePlugin().computeHighlighting(text);
else
return noHighlight(text);
} finally {
s.stop();
// System.out.println("Highlighting code: " + s.getTime() + " ms");
}
}
@Test
public void testBulkWithout() throws InterruptedException {
StopWatch stopWatch = new StopWatch();
stopWatch.start();
if (this.sqlgGraph.getSqlDialect().supportsBatchMode()) {
this.sqlgGraph.tx().normalBatchModeOn();
}
Vertex god = this.sqlgGraph.addVertex(T.label, "God");
List<String> uuids = new ArrayList<>();
for (int i = 0; i < 100; i++) {
String uuid = UUID.randomUUID().toString();
uuids.add(uuid);
Vertex person = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", uuid);
god.addEdge("creator", person);
}
this.sqlgGraph.tx().commit();
stopWatch.stop();
System.out.println(stopWatch.toString());
stopWatch.reset();
stopWatch.start();
testBulkWithout_assert(this.sqlgGraph, uuids);
if (this.sqlgGraph1 != null) {
Thread.sleep(SLEEP_TIME);
testBulkWithout_assert(this.sqlgGraph1, uuids);
}
stopWatch.stop();
System.out.println(stopWatch.toString());
}
public void aTestExecute() throws Exception {
StopWatch sw = new StopWatch();
ErrataCacheTask ect = new ErrataCacheTask();
sw.start();
ect.execute(null);
sw.stop();
System.out.println("ErrataCacheTask took [" + sw.getTime() + "]");
}
@Test
public void testMilCompleteEdges() {
ArrayList<SqlgVertex> persons = createMilPersonVertex();
ArrayList<SqlgVertex> cars = createMilCarVertex();
this.sqlgGraph.tx().commit();
StopWatch stopWatch = new StopWatch();
stopWatch.start();
this.sqlgGraph.tx().streamingBatchModeOn();
LinkedHashMap<String, Object> keyValues = new LinkedHashMap<>();
keyValues.put("name", "halo");
keyValues.put("name2", "halo");
for (int i = 0; i < NUMBER_OF_VERTICES; i++) {
SqlgVertex person = persons.get(0);
SqlgVertex car = cars.get(i);
person.streamEdge("person_car", car, keyValues);
}
this.sqlgGraph.tx().commit();
int mb = 1024 * 1024;
// get Runtime instance
Runtime instance = Runtime.getRuntime();
System.out.println("***** Heap utilization statistics [MB] *****\n");
// available memory
System.out.println("Total Memory: " + instance.totalMemory() / mb);
// free memory
System.out.println("Free Memory: " + instance.freeMemory() / mb);
// used memory
System.out.println("Used Memory: "
+ (instance.totalMemory() - instance.freeMemory()) / mb);
// Maximum available memory
System.out.println("Max Memory: " + instance.maxMemory() / mb);
Assert.assertEquals(NUMBER_OF_VERTICES, this.sqlgGraph.traversal().V(persons.get(0)).out("person_car").toList().size());
stopWatch.stop();
System.out.println("testMilCompleteEdges took " + stopWatch.toString());
}
@Test
public void testBulkWithinVertexCompileStep() throws InterruptedException {
StopWatch stopWatch = new StopWatch();
stopWatch.start();
if (this.sqlgGraph.getSqlDialect().supportsBatchMode()) {
this.sqlgGraph.tx().normalBatchModeOn();
}
Vertex god = this.sqlgGraph.addVertex(T.label, "God");
List<String> uuids = new ArrayList<>();
for (int i = 0; i < 100; i++) {
String uuid = UUID.randomUUID().toString();
uuids.add(uuid);
Vertex person = this.sqlgGraph.addVertex(T.label, "Person", "idNumber", uuid);
god.addEdge("creator", person);
}
this.sqlgGraph.tx().commit();
stopWatch.stop();
System.out.println(stopWatch.toString());
stopWatch.reset();
stopWatch.start();
testBulkWithinVertexCompileStep_assert(this.sqlgGraph, god, uuids);
if (this.sqlgGraph1 != null) {
Thread.sleep(SLEEP_TIME);
testBulkWithinVertexCompileStep_assert(this.sqlgGraph1, god, uuids);
}
stopWatch.stop();
System.out.println(stopWatch.toString());
}
private static void reportHammingPrediction(Config config, CBM cbm, MultiLabelClfDataSet dataSet, String name) throws Exception{
System.out.println("============================================================");
System.out.println("Making predictions on "+name+" set with the instance Hamming loss optimal predictor");
String output = config.getString("output.dir");
MarginalPredictor marginalPredictor = new MarginalPredictor(cbm);
marginalPredictor.setPiThreshold(config.getDouble("predict.piThreshold"));
StopWatch stopWatch = new StopWatch();
stopWatch.start();
MultiLabel[] predictions = marginalPredictor.predict(dataSet);
System.out.println("time spent on prediction = "+stopWatch);
MLMeasures mlMeasures = new MLMeasures(dataSet.getNumClasses(),dataSet.getMultiLabels(),predictions);
System.out.println(name+" performance with the instance Hamming loss optimal predictor");
System.out.println(mlMeasures);
File performanceFile = Paths.get(output,name+"_predictions", "instance_hamming_loss_optimal","performance.txt").toFile();
FileUtils.writeStringToFile(performanceFile, mlMeasures.toString());
System.out.println(name+" performance is saved to "+performanceFile.toString());
// Here we do not use approximation
double[] setProbs = IntStream.range(0, predictions.length).parallel().
mapToDouble(i->cbm.predictAssignmentProb(dataSet.getRow(i),predictions[i])).toArray();
File predictionFile = Paths.get(output,name+"_predictions", "instance_hamming_loss_optimal","predictions.txt").toFile();
try (BufferedWriter br = new BufferedWriter(new FileWriter(predictionFile))){
for (int i=0;i<dataSet.getNumDataPoints();i++){
br.write(predictions[i].toString());
br.write(":");
br.write(""+setProbs[i]);
br.newLine();
}
}
System.out.println("predicted sets and their probabilities are saved to "+predictionFile.getAbsolutePath());
boolean individualPerformance = true;
if (individualPerformance){
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.writeValue(Paths.get(output,name+"_predictions", "instance_hamming_loss_optimal","individual_performance.json").toFile(),mlMeasures.getMacroAverage());
}
System.out.println("============================================================");
}
@Ignore
@Test
public void stressTestCreateLog()
{
// this log will be delivered to snowflake
TelemetryService service = TelemetryService.getInstance();
// send one http request for each event
StopWatch sw = new StopWatch();
sw.start();
int rate = 50;
int sent = 0;
int duration = 60;
while (sw.getTime() < duration * 1000)
{
int toSend = (int) (sw.getTime() / 1000) * rate - sent;
for (int i = 0; i < toSend; i++)
{
TelemetryEvent log = new TelemetryEvent.LogBuilder()
.withName("StressTestLog")
.withValue("This is an example log for stress test " + sent)
.build();
System.out.println("stress test: " + sent++ + " sent.");
service.report(log);
}
}
sw.stop();
}
@Override
public RepositoryStatistics getLastStatistics( String repositoryId )
throws MetadataRepositoryException
{
StopWatch stopWatch = new StopWatch();
stopWatch.start();
try(RepositorySession session = repositorySessionFactory.createSession()) {
final MetadataRepository metadataRepository = session.getRepository( );
// TODO: consider a more efficient implementation that directly gets the last one from the content repository
List<String> scans = metadataRepository.getMetadataFacets(session, repositoryId, DefaultRepositoryStatistics.FACET_ID);
if (scans == null) {
return null;
}
Collections.sort(scans);
if (!scans.isEmpty()) {
String name = scans.get(scans.size() - 1);
RepositoryStatistics repositoryStatistics =
RepositoryStatistics.class.cast(metadataRepository.getMetadataFacet(session, repositoryId,
RepositoryStatistics.FACET_ID, name));
stopWatch.stop();
log.debug("time to find last RepositoryStatistics: {} ms", stopWatch.getTime());
return repositoryStatistics;
} else {
return null;
}
}
}