java.util.HashMap#entrySet ( )源码实例Demo

下面列出了java.util.HashMap#entrySet ( ) 实例代码,或者点击链接到github查看源代码,也可以在右侧发表评论。

源代码1 项目: TelegramBotsExample   文件: FilesHandlers.java
/**
 * Handles the delete command when no file parameter was supplied: marks the user
 * as awaiting a file selection and replies with a keyboard listing their
 * uploaded files so they can pick one to delete.
 *
 * @param message  incoming Telegram message that triggered the command
 * @param language language code used to localise the reply text
 * @throws InvalidObjectException propagated from the Telegram API objects
 * @throws TelegramApiException  if sending the reply fails
 */
private void onDeleteCommandWithoutParameters(Message message, String language) throws InvalidObjectException, TelegramApiException {
    // Remember that this user is now in the "choose a file to delete" state.
    DatabaseManager.getInstance().addUserForFile(message.getFrom().getId(), DELETE_UPLOADED_STATUS);
    SendMessage sendMessageRequest = new SendMessage();
    sendMessageRequest.setText(LocalisationService.getString("deleteUploadedFile", language));
    sendMessageRequest.setChatId(message.getChatId());
    HashMap<String, String> files = DatabaseManager.getInstance().getFilesByUser(message.getFrom().getId());
    ReplyKeyboardMarkup replyKeyboardMarkup = new ReplyKeyboardMarkup();
    if (!files.isEmpty()) {
        // One keyboard row per uploaded file: "<deleteCommand> <fileId> <arrow> <fileName>".
        List<KeyboardRow> commands = new ArrayList<>();
        for (Map.Entry<String, String> entry : files.entrySet()) {
            KeyboardRow commandRow = new KeyboardRow();
            commandRow.add(Commands.deleteCommand + " " + entry.getKey() + " " + Emoji.LEFT_RIGHT_ARROW.toString()
                    + " " + entry.getValue());
            commands.add(commandRow);
        }
        replyKeyboardMarkup.setResizeKeyboard(true);
        replyKeyboardMarkup.setOneTimeKeyboard(true);
        replyKeyboardMarkup.setKeyboard(commands);
    }
    sendMessageRequest.setReplyMarkup(replyKeyboardMarkup);
    execute(sendMessageRequest);
}
 
/**
 * Builds the RDF model that describes every registered datasource as a
 * void:Dataset resource published under the given base URL.
 *
 * @param baseUrl     base URL under which each datasource is exposed
 * @param datasources map from datasource name to its implementation
 */
public IndexRequestProcessorForTPFs(
                           final String baseUrl,
                           final HashMap<String, IDataSource> datasources )
{
    this.model = ModelFactory.createDefaultModel();

    datasources.forEach( (name, ds) -> {
        // Each datasource lives at <baseUrl>/<name>.
        final Resource dsUrl = new ResourceImpl( baseUrl + "/" + name );

        // Type plus human-readable metadata for the index page.
        model.add( dsUrl, new PropertyImpl(RDF + "type"), new ResourceImpl(VOID + "Dataset") );
        model.add( dsUrl, new PropertyImpl(RDFS + "label"), ds.getTitle() );
        model.add( dsUrl, new PropertyImpl(DC + "title"), ds.getTitle() );
        model.add( dsUrl, new PropertyImpl(DC + "description"), ds.getDescription() );
    } );
}
 
源代码3 项目: KEEL   文件: ResultsProccessor.java
/**
 * Normalises every accumulated measure into a per-rule mean by dividing it by
 * the total rule count of the corresponding algorithm.
 * Measures listed in {@code excludedFromAverage} are left untouched.
 */
private void calcMeans()
{
    for (Map.Entry<String, HashMap<String, Double>> entry : algorithmMeasures.entrySet()) {
        String alg = entry.getKey();
        HashMap<String, Double> measures = entry.getValue();

        for (Map.Entry<String, Double> measure : measures.entrySet())
        {
            String measureName = measure.getKey();
            if (!excludedFromAverage.contains(measureName))
            {
                // Update in place via Entry.setValue: avoids the redundant
                // algorithmMeasures.get(alg) re-lookup and is the documented-safe
                // way to change a value while iterating the entry set.
                measure.setValue(measure.getValue() / algorithmTotalRules.get(alg));
            }
        }
    }
}
 
/**
 * Fetches all hits of the first (build-side) table of a hash join and inserts a
 * trimmed copy of each hit into the shared comparison hash, keyed by the values
 * of its join fields for every registered comparison.
 *
 * @param optimizationTermsFilterStructure comparisonID -> field -> collected values;
 *        NOTE(review): passed per-comparison into getComparisonKey, which presumably
 *        fills it as a side effect — confirm against getComparisonKey's implementation.
 * @param firstTableRequest request describing the first joined table
 */
private void createKeyToResultsAndFillOptimizationStructure(Map<String,Map<String, List<Object>>> optimizationTermsFilterStructure, TableInJoinRequestBuilder firstTableRequest) {
    List<SearchHit> firstTableHits = fetchAllHits(firstTableRequest);

    // Sequential doc ids for the copied hits, shared across all comparisons.
    int resultIds = 1;
    for (SearchHit hit : firstTableHits) {
        HashMap<String, List<Map.Entry<Field, Field>>> comparisons = this.hashJoinComparisonStructure.getComparisons();
        for (Map.Entry<String, List<Map.Entry<Field, Field>>> comparison : comparisons.entrySet()) {
            String comparisonID = comparison.getKey();
            List<Map.Entry<Field, Field>> t1ToT2FieldsComparison = comparison.getValue();

            // Join key built from this hit's values of the table-1 join fields.
            String key = getComparisonKey(t1ToT2FieldsComparison, hit, true, optimizationTermsFilterStructure.get(comparisonID));

            // Copy the hit under a fresh sequential doc id, keeping its source.
            SearchHit searchHit = new SearchHit(resultIds, hit.getId(), new Text(hit.getType()), hit.getFields(), null);
            searchHit.sourceRef(hit.getSourceRef());

            // Strip the copied source down to the requested fields before hashing it.
            onlyReturnedFields(searchHit.getSourceAsMap(), firstTableRequest.getReturnedFields(),firstTableRequest.getOriginalSelect().isSelectAll());
            resultIds++;
            this.hashJoinComparisonStructure.insertIntoComparisonHash(comparisonID, key, searchHit);
        }
    }
}
 
/**
 * Collapses the contact list to one boolean flag per probe/SNP pair and writes
 * the flags to the output file: TRUE wins if any contact exists for the pair,
 * otherwise FALSE is recorded once.
 *
 * @param contacts  contacts to summarise
 * @param outWriter destination text file
 * @throws IOException if writing fails
 */
private static void printOutContacts(ArrayList<DesiredChrContact> contacts, TextFile outWriter) throws IOException {
    HashMap<String, Boolean> contactFlags = new HashMap<>();

    for (DesiredChrContact contact : contacts) {
        String pairKey = contact.getProbeName() + "-" + contact.getSnpName();
        // merge with logicalOr == "TRUE wins, FALSE only recorded when absent".
        contactFlags.merge(pairKey, contact.hasContact(), Boolean::logicalOr);
    }

    for (Entry<String, Boolean> flag : contactFlags.entrySet()) {
        outWriter.write(flag.getKey() + "\t" + flag.getValue() + "\n");
    }
}
 
源代码6 项目: java-sdk   文件: AipContentCensor.java
/**
 * Face (avatar) audit endpoint.
 *
 * @param imgData array of raw image byte arrays to audit
 * @param options optional request parameters; may be null
 * @return JSONObject audit result returned by the service
 */
public JSONObject faceAudit(byte[][] imgData, HashMap<String, String> options) {
    AipRequest request = new AipRequest();

    // Base64-encode every image and join them into one comma-separated field.
    ArrayList<String> encodedImages = new ArrayList<String>();
    for (byte[] image : imgData) {
        encodedImages.add(Base64Util.encode(image));
    }
    request.addBody("images", Util.mkString(encodedImages.iterator(), ','));

    // Copy the optional parameters, if any, straight into the request body.
    if (options != null) {
        options.forEach((key, value) -> request.addBody(key, value));
    }

    return faceAuditHelper(request, options);
}
 
源代码7 项目: io   文件: BatchAdapter.java
/**
 * This method updates the ChangeSet and creates the header for a PUT command.
 * @param url Target URL of the PUT request
 * @param data Request body
 * @param etag ETag value for optimistic concurrency control
 * @param map Additional header map; may be null when no extra headers are needed
 * @param contentType Content-Type header value
 * @return DcBatchRespose object
 * @throws DaoException Exception thrown
 */
@Override
public DcResponse put(String url, String data, String etag, HashMap<String, String> map, String contentType)
        throws DaoException {
    Command cmd = new Command();
    cmd.method = HttpMethods.PUT;
    cmd.url = url;
    cmd.addHeader("Content-Type", contentType);
    cmd.etag = etag;
    cmd.setBody(data);
    // Guard against a null header map so callers without extra headers do not NPE.
    if (map != null) {
        for (Map.Entry<String, String> entry : map.entrySet()) {
            cmd.addHeader(entry.getKey(), entry.getValue());
        }
    }
    appendChangeSet(cmd.get());
    return new DcBatchRespose();
}
 
源代码8 项目: jaamsim   文件: MeshData.java
/**
 * Scans the node tree for animated transforms and rebuilds {@code _actionDesc}
 * with one entry per distinct action name and its duration.
 */
private void populateActionList() {
	// action name -> latest final key-frame time seen for that action.
	final HashMap<String, Double> actionMap = new HashMap<>();
	class ActionWalker extends TreeWalker {
		@Override
		public void onNode(Mat4d trans, Mat4d invTrans, TreeNode node) {

			if (node.trans instanceof AnimTrans) {
				AnimTrans at = (AnimTrans)node.trans;

				for (Act act : at.actions) {
					Double existingTime = actionMap.get(act.name);
					// Duration = the action's last key-frame time; keep the maximum
					// when the same action name appears on several nodes.
					double lastTime = act.times[act.times.length-1];
					if (existingTime == null || lastTime > existingTime) {
						actionMap.put(act.name, lastTime);
					}
				}
			}
		}
	}

	ActionWalker actionWalker = new ActionWalker();
	walkTree(actionWalker, treeRoot, new Mat4d(), new Mat4d(), null);

	// Convert the collected map into the final list of action descriptions.
	_actionDesc = new ArrayList<>();
	for (Map.Entry<String, Double> entry : actionMap.entrySet()) {
		Action.Description desc = new Action.Description();
		desc.name = entry.getKey();
		desc.duration = entry.getValue();
		_actionDesc.add(desc);
	}


}
 
源代码9 项目: Android_framework   文件: BaseDB.java
/**
 * Converts a string-to-string map into Android {@link ContentValues}.
 *
 * @param map column name to value mapping; must not be null
 * @return a ContentValues holding one entry per map pair
 */
private ContentValues parseHashMapToContentValues(HashMap<String, String> map){
    ContentValues contentValues = new ContentValues();
    for (String column : map.keySet()) {
        contentValues.put(column, map.get(column));
    }
    return contentValues;
}
 
源代码10 项目: blade-ink   文件: Json.java
/**
 * Deserialises a JSON array token into a runtime {@link Container}: the leading
 * elements become the container's ordered content, and the terminator element
 * (last array slot) supplies count flags, name and named-only content.
 *
 * @param jArray the JSON array to convert; its last element is the terminator
 *               map or null
 * @return the populated container
 * @throws Exception if a nested token cannot be converted
 */
@SuppressWarnings("unchecked")
static Container jArrayToContainer(List<Object> jArray) throws Exception {
	Container container = new Container();
	// All but the terminator element form the container's ordered content.
	container.setContent(jArrayToRuntimeObjList(jArray, true));
	// Final RTObject in the array is always a combination of
	// - named content
	// - a "#" key with the countFlags
	// (if either exists at all, otherwise null)
	HashMap<String, Object> terminatingObj = (HashMap<String, Object>) jArray.get(jArray.size() - 1);
	if (terminatingObj != null) {
		HashMap<String, RTObject> namedOnlyContent = new HashMap<>(terminatingObj.size());
		for (Entry<String, Object> keyVal : terminatingObj.entrySet()) {
			if ("#f".equals(keyVal.getKey())) {
				// "#f" carries the container's count flags.
				container.setCountFlags((int) keyVal.getValue());
			} else if ("#n".equals(keyVal.getKey())) {
				// "#n" carries the container's name.
				container.setName(keyVal.getValue().toString());
			} else {
				// Anything else is named content; a sub-container inherits its key as its name.
				RTObject namedContentItem = jTokenToRuntimeObject(keyVal.getValue());
				Container namedSubContainer = namedContentItem instanceof Container ? (Container) namedContentItem
						: (Container) null;
				if (namedSubContainer != null)
					namedSubContainer.setName(keyVal.getKey());

				namedOnlyContent.put(keyVal.getKey(), namedContentItem);
			}
		}
		container.setNamedOnlyContent(namedOnlyContent);
	}

	return container;
}
 
源代码11 项目: rocketmq-4.3.0   文件: RebalanceImpl.java
/**
 * Sends batch unlock requests for every locked message queue to its broker
 * master, then clears the locked flag on the matching local process queues.
 *
 * @param oneway true to fire the unlock request without waiting for a response
 */
public void unlockAll(final boolean oneway) {
//        Build the brokerName -> message-queue-set table from the process queues
        HashMap<String, Set<MessageQueue>> brokerMqs = this.buildProcessQueueTableByBrokerName();

        for (final Map.Entry<String, Set<MessageQueue>> entry : brokerMqs.entrySet()) {
            final String brokerName = entry.getKey();
            final Set<MessageQueue> mqs = entry.getValue();

            if (mqs.isEmpty())
                continue;

//            Look up the address of the master broker for this broker name
            FindBrokerResult findBrokerResult = this.mQClientFactory.findBrokerAddressInSubscribe(brokerName, MixAll.MASTER_ID, true);
            if (findBrokerResult != null) {
                UnlockBatchRequestBody requestBody = new UnlockBatchRequestBody();
                requestBody.setConsumerGroup(this.consumerGroup);
                requestBody.setClientId(this.mQClientFactory.getClientId());
                requestBody.setMqSet(mqs);

                try {
//                    Batch-unlock the message queues on the broker (1000 ms timeout)
                    this.mQClientFactory.getMQClientAPIImpl().unlockBatchMQ(findBrokerResult.getBrokerAddr(), requestBody, 1000, oneway);

                    for (MessageQueue mq : mqs) {
                        ProcessQueue processQueue = this.processQueueTable.get(mq);
                        if (processQueue != null) {
//                            Mark the local process queue as unlocked
                            processQueue.setLocked(false);
                            log.info("the message queue unlock OK, Group: {} {}", this.consumerGroup, mq);
                        }
                    }
                } catch (Exception e) {
                    log.error("unlockBatchMQ exception, " + mqs, e);
                }
            }
        }
    }
 
源代码12 项目: jxapi   文件: Attachment.java
/**
 * Serialises a string map into a flat Gson {@link JsonObject}.
 *
 * @param map key/value pairs to serialise
 * @return a JsonObject with one string property per map entry
 */
private JsonElement serializeHash(HashMap<String, String> map) {
    JsonObject json = new JsonObject();
    map.forEach(json::addProperty);
    return json;
}
 
源代码13 项目: Alink   文件: DocCountVectorizerModelMapper.java
/**
 * Converts a document string into a sparse feature vector over a fixed vocabulary.
 *
 * @param content      document text, tokenised by NLPConstant.WORD_DELIMITER
 * @param minTF        minimum term frequency: an absolute count when >= 1,
 *                     otherwise a fraction of the document length
 * @param wordIdWeight vocabulary: token -> (feature index, weight)
 * @param featureType  strategy that turns (weight, count, ratio) into a value
 * @param featureNum   dimensionality of the resulting vector
 * @return sparse vector with one entry per vocabulary token passing the threshold
 */
public static SparseVector predictSparseVector(String content, double minTF,
                                               HashMap<String, Tuple2<Integer, Double>> wordIdWeight,
                                               FeatureType featureType, int featureNum) {
    String[] tokens = content.split(NLPConstant.WORD_DELIMITER);
    double minTermCount = minTF >= 1.0 ? minTF : minTF * tokens.length;
    double tokenRatio = 1.0 / tokens.length;

    // Count occurrences of tokens that exist in the vocabulary.
    HashMap<String, Integer> termCounts = new HashMap<>(0);
    for (String token : tokens) {
        if (wordIdWeight.containsKey(token)) {
            termCounts.merge(token, 1, Integer::sum);
        }
    }

    // Emit one (index, value) pair per term that passes the frequency threshold.
    int[] indices = new int[termCounts.size()];
    double[] weights = new double[indices.length];
    int filled = 0;
    for (Map.Entry<String, Integer> term : termCounts.entrySet()) {
        double occurrences = term.getValue();
        if (occurrences >= minTermCount) {
            Tuple2<Integer, Double> idWeight = wordIdWeight.get(term.getKey());
            indices[filled] = idWeight.f0;
            weights[filled++] = featureType.featureValueFunc.apply(idWeight.f1, occurrences, tokenRatio);
        }
    }
    return new SparseVector(featureNum, Arrays.copyOf(indices, filled), Arrays.copyOf(weights, filled));
}
 
/**
 * Test helper run on a server VM: logs how many buckets of the partitioned
 * region are stored locally and verifies that the bucket map exposes a
 * non-null entry set.
 */
public static void checkBucketsOnServer(){
  PartitionedRegion region = (PartitionedRegion)cache.getRegion(PartitionedRegionName);
  // Raw map returned by the data store; presumably bucket id -> local size — TODO confirm.
  HashMap localBucket2RegionMap = (HashMap)region
  .getDataStore().getSizeLocally();
  getLogWriter().info(
  "Size of the " + PartitionedRegionName + " in this VM :- "
      + localBucket2RegionMap.size());
  Set entrySet = localBucket2RegionMap.entrySet();
  assertNotNull(entrySet);
}
 
源代码15 项目: j2objc   文件: OldAndroidHashMapTest.java
/**
 * Verifies that iterating the entry set of a four-element HashMap yields each
 * of the expected entries exactly once.
 */
public void testEntryIterator() throws Exception {
    HashMap<String, Integer> map = new HashMap<String, Integer>();

    // One flag per expected entry; set when that entry has been seen.
    boolean[] seen = new boolean[4];

    addItems(map);

    for (Object entry : map.entrySet()) {
        String text = entry.toString();

        int slot = 0;
        if (text.equals("two=2")) {
            slot = 1;
        } else if (text.equals("three=3")) {
            slot = 2;
        } else if (text.equals("four=4")) {
            slot = 3;
        } else if (!text.equals("one=1")) {
            fail("Unknown entry in HashMap");
        }

        if (seen[slot]) {
            fail("entry returned more than once");
        } else {
            seen[slot] = true;
        }
    }

    // Every expected entry must have appeared.
    assertTrue(seen[0]);
    assertTrue(seen[1]);
    assertTrue(seen[2]);
    assertTrue(seen[3]);
}
 
/**
 * Tests restoring from an empty state snapshot taken with a previous Flink
 * version, in the case where partitions ARE discoverable for the subscribed
 * topics: every discovered partition must be treated as newly created and
 * start from the earliest offset.
 */
@Test
public void testRestoreFromEmptyStateWithPartitions() throws Exception {
	final List<KafkaTopicPartition> partitions = new ArrayList<>(PARTITION_STATE.keySet());

	final DummyFlinkKafkaConsumer<String> consumerFunction =
		new DummyFlinkKafkaConsumer<>(TOPICS, partitions, FlinkKafkaConsumerBase.PARTITION_DISCOVERY_DISABLED);

	StreamSource<String, DummyFlinkKafkaConsumer<String>> consumerOperator =
			new StreamSource<>(consumerFunction);

	final AbstractStreamOperatorTestHarness<String> testHarness =
			new AbstractStreamOperatorTestHarness<>(consumerOperator, 1, 1, 0);

	testHarness.setTimeCharacteristic(TimeCharacteristic.ProcessingTime);

	testHarness.setup();

	// restore state from binary snapshot file
	testHarness.initializeState(
		OperatorSnapshotUtil.getResourceFilename(
			"kafka-consumer-migration-test-flink" + testMigrateVersion + "-empty-state-snapshot"));

	testHarness.open();

	// the expected state in "kafka-consumer-migration-test-flink1.x-snapshot-empty-state";
	// all new partitions after the snapshot are considered as partitions that were created while the
	// consumer wasn't running, and should start from the earliest offset.
	final HashMap<KafkaTopicPartition, Long> expectedSubscribedPartitionsWithStartOffsets = new HashMap<>();
	for (KafkaTopicPartition partition : PARTITION_STATE.keySet()) {
		expectedSubscribedPartitionsWithStartOffsets.put(partition, KafkaTopicPartitionStateSentinel.EARLIEST_OFFSET);
	}

	// assert that there are partitions and the set is identical to the expected list
	assertTrue(consumerFunction.getSubscribedPartitionsToStartOffsets() != null);
	assertTrue(!consumerFunction.getSubscribedPartitionsToStartOffsets().isEmpty());
	assertEquals(expectedSubscribedPartitionsWithStartOffsets, consumerFunction.getSubscribedPartitionsToStartOffsets());

	// the new partitions should have been considered as restored state
	assertTrue(consumerFunction.getRestoredState() != null);
	assertTrue(!consumerFunction.getSubscribedPartitionsToStartOffsets().isEmpty());
	for (Map.Entry<KafkaTopicPartition, Long> expectedEntry : expectedSubscribedPartitionsWithStartOffsets.entrySet()) {
		assertEquals(expectedEntry.getValue(), consumerFunction.getRestoredState().get(expectedEntry.getKey()));
	}

	consumerOperator.close();
	consumerOperator.cancel();
}
 
源代码17 项目: openprodoc   文件: ContribRes.java
/**
 * Builds the HTML result page for a contribution upload: parses the multipart
 * request, stores the uploaded file as a new document under the user's folder,
 * and returns the page with an OK or error message substituted in.
 *
 * @param Req         incoming upload request (must be multipart)
 * @param LocalSess   session used for all repository operations
 * @param ConfContrib contribution configuration (CSS, size/extension limits, messages)
 * @param FoldUser    the user's folder; the new document inherits its id and ACL
 * @return the final HTML page to send back
 * @throws Exception if parsing the request or generating the page fails
 */
static synchronized private String GenHtml(HttpServletRequest Req, DriverGeneric LocalSess, ContribConf ConfContrib, PDFolders FoldUser) throws Exception
{
String HtmlFinal;
String Agent=Req.getHeader("User-Agent");
String DimHtml=ConfContrib.SolveHtmlRes(Agent);
// Pick the device-specific page when one is configured, else the default page.
if (DimHtml!=null)
    {
    HtmlFinal=getHtml(LocalSess, DimHtml);
    }
else
    HtmlFinal=HtmlBase;
// Insert the configured CSS: external link when it is a URL, inline otherwise.
if (ConfContrib.getFormContribCSS()!=null)
    {
    if (ConfContrib.getFormContribCSS().startsWith("http"))
       HtmlFinal=HtmlFinal.replace("@[email protected]", "<link rel=\"STYLESHEET\" type=\"text/css\" href=\""+ConfContrib.getFormContribCSS()+"\"/>");
    else
       HtmlFinal=HtmlFinal.replace("@[email protected]", GenCSS(LocalSess, ConfContrib.getFormContribCSS()));
    }
else
    HtmlFinal=HtmlFinal.replace("@[email protected]", "");
if (!ServletFileUpload.isMultipartContent(Req))
    {
    HtmlFinal=HtmlFinal.replace("@[email protected]", "<div class=\"CONTRIBRESKO\">ERROR:NO File<div>");    
    return(HtmlFinal);
    }
String NameDocT=null;
String FileName=null;
InputStream FileData=null;
HashMap <String, String>ListFields=new HashMap<>();
DiskFileItemFactory factory = new DiskFileItemFactory();
factory.setSizeThreshold(1000000);
ServletFileUpload upload = new ServletFileUpload(factory);
upload.setFileSizeMax(ConfContrib.getMaxSize());
List items = upload.parseRequest(Req);
Iterator iter = items.iterator();
// Split the multipart items: CONTRIB_DT selects the document type, the other
// form fields become document attributes, and the file item carries the content.
while (iter.hasNext())
    {
    FileItem item = (FileItem) iter.next();
    if (item.isFormField())
        {
        if (item.getFieldName().equals("CONTRIB_DT"))
            NameDocT=item.getString("UTF-8");
        else
            {
            ListFields.put(item.getFieldName(), item.getString("UTF-8"));
            }
        }
    else 
        {
        FileName=item.getName();
        FileData=item.getInputStream();
        }
    }
// FIX: guard against a multipart request without a file part; FileName would be
// null and the substring call below would throw a NullPointerException.
if (FileName==null)
    {
    HtmlFinal=HtmlFinal.replace("@[email protected]", "<div class=\"CONTRIBRESKO\">ERROR:NO File<div>");    
    return(HtmlFinal);
    }
if (!ConfContrib.IsAllowedExt(FileName.substring(FileName.lastIndexOf(".")+1)))
    {
    HtmlFinal=HtmlFinal.replace("@[email protected]", "<div class=\"CONTRIBRESKO\">ERROR:Not Allowed extension<div>");    
    return(HtmlFinal);
    }
PDDocs DocTmp=new PDDocs(LocalSess, NameDocT);
DocTmp.setName(FileName);
DocTmp.setStream(FileData);
Record AttrDef = DocTmp.getRecSum();
// Copy the remaining form fields into the matching document attributes.
// FIX: the original had a stray ';' after this 'if', which made the Import call
// unconditional and could NPE when the attribute name is unknown.
for (Map.Entry<String, String> entry : ListFields.entrySet())
    {
    if (AttrDef.getAttr(entry.getKey())!=null)
        AttrDef.getAttr(entry.getKey()).Import(entry.getValue());
    }
DocTmp.assignValues(AttrDef);
DocTmp.setParentId(FoldUser.getPDId());
DocTmp.setACL(FoldUser.getACL());
try {
DocTmp.insert();
HtmlFinal=HtmlFinal.replace("@[email protected]", "<div class=\"CONTRIBRESOK\">"+ConfContrib.getOKMsg()+"</div>");
HtmlFinal=HtmlFinal.replace("CONTRIBRETRYKO", "CONTRIBRETRYOK");
} catch (Exception Ex)
    {
    HtmlFinal=HtmlFinal.replace("@[email protected]", "<div class=\"CONTRIBRESKO\">ERROR:"+Ex.getLocalizedMessage()+"<div>");    
    }
return(HtmlFinal);
}
 
源代码18 项目: EntityAPI   文件: EntityBuilder.java
/**
 * Registers all given behaviours with their priorities on this builder.
 *
 * @param prioritisedBehaviours map of behaviour to its priority
 * @return this builder, for chaining
 */
public EntityBuilder withBehaviours(HashMap<Behaviour, Integer> prioritisedBehaviours) {
    // putAll performs exactly the per-entry copy the manual loop did.
    this.behaviours.putAll(prioritisedBehaviours);
    return this;
}
 
源代码19 项目: PE-HFT-Java   文件: PortfolioData.java
/**
 * Applies every key/value pair in {@code settings} as a parameter of this
 * portfolio via {@code setParam}.
 *
 * @param settings parameter name to value mapping
 */
public void setSettings(HashMap<String, String> settings) {
	for (String name : settings.keySet()) {
		setParam(name, settings.get(name));
	}
}
 
源代码20 项目: systemsgenetics   文件: ConvertHpoToMatrix.java
/**
	 * Builds a binary gene-by-HPO-term matrix from an HPO phenotype-to-genes file
	 * and writes it out, together with the list of genes that occur in at least
	 * one pathway. All input/output paths are hard-coded below.
	 *
	 * @param args the command line arguments (not used)
	 * @throws java.io.IOException
	 * @throws java.lang.Exception
	 */
	public static void main(String[] args) throws IOException, Exception {

		//final File hpoFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\HPO\\135\\ALL_SOURCES_ALL_FREQUENCIES_diseases_to_genes_to_phenotypes.txt");
		
		final File ncbiToEnsgMapFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\ensgNcbiId.txt");
		final File hgncToEnsgMapFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\ensgHgnc.txt");
//		final File geneOrderFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\genes.txt");
//		
		final File hpoFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\HPO\\135\\ALL_SOURCES_ALL_FREQUENCIES_phenotype_to_genes.txt");
//		final File outputFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\PathwayMatrix\\" + hpoFile.getName() + "_matrix.txt.gz");
//		final File outputFile2 = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\PathwayMatrix\\" + hpoFile.getName() + "_genesInPathways.txt");

		final File geneOrderFile = new File("C:\\UMCG\\Genetica\\Projects\\Depict2Pgs\\testPredictions\\mergedGeneNetworkCoregBoth.rows.txt");
		final File outputFile = new File("C:\\UMCG\\Genetica\\Projects\\Depict2Pgs\\testPredictions\\" + hpoFile.getName() + "_CoregBoth_matrix.txt.gz");
		final File outputFile2 = new File("C:\\UMCG\\Genetica\\Projects\\Depict2Pgs\\testPredictions\\" + hpoFile.getName() + "_CoregBoth_genesInPathways.txt");


//		final File hpoFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\HPO\\135\\bavWithoutWilliams.txt");
//		final File outputFile = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\PathwayMatrix\\" + hpoFile.getName() + "_matrix.txt.gz");
//		final File outputFile2 = new File("C:\\UMCG\\Genetica\\Projects\\GeneNetwork\\Data31995Genes05-12-2017\\PCA_01_02_2018\\PathwayMatrix\\" + hpoFile.getName() + "_genesInPathways.txt");

		
		// Identifier mappings used to translate HPO gene ids to Ensembl ids.
		HashMap<String, ArrayList<String>> ncbiToEnsgMap = loadNcbiToEnsgMap(ncbiToEnsgMapFile);
		HashMap<String, ArrayList<String>> hgncToEnsgMap = loadHgncToEnsgMap(hgncToEnsgMapFile);

		HashMap<String, HashSet<String>> hpoToGenes = readHpoFile(hpoFile, ncbiToEnsgMap, hgncToEnsgMap);

		ArrayList<String> geneOrder = readGenes(geneOrderFile);

		System.out.println("Total HPO terms: " + hpoToGenes.size());
		System.out.println("Genes in order file: " + geneOrder.size());

		// Rows follow the gene order file; one column per HPO term.
		DoubleMatrixDataset<String, String> hpoMatrix = new DoubleMatrixDataset<>(geneOrder, hpoToGenes.keySet());

		HashSet<String> genesWithHpo = new HashSet<>(10000);

		// FIX: try-with-resources so the gene writer is closed (and flushed) even
		// if an exception is thrown while filling the matrix.
		try (BufferedWriter geneWriter = new BufferedWriter(new FileWriter(outputFile2))) {

			for (Map.Entry<String, HashSet<String>> hpoToGenesEntry : hpoToGenes.entrySet()) {

				String hpo = hpoToGenesEntry.getKey();

				for (String gene : hpoToGenesEntry.getValue()) {

					if (hpoMatrix.containsRow(gene)) {
						if (genesWithHpo.add(gene)) {
							//add to genes file if not already done
							geneWriter.write(gene);
							geneWriter.write('\n');
						}
						hpoMatrix.setElement(gene, hpo, 1);
					}

				}

			}
		}

		hpoMatrix.save(outputFile);

		System.out.println("Genes in pathway: " + genesWithHpo.size());

	}