Listed below are example usages of java.util.LinkedHashMap#keySet(). Follow the links to view the source code on GitHub, or leave a comment on the right.
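As a quick refresher before the examples: keySet() on a LinkedHashMap returns its keys in insertion order (or access order, if the map was constructed that way), which is what most of the snippets below rely on. A minimal JDK-only sketch, not taken from any of the projects below:

import java.util.LinkedHashMap;
import java.util.Map;

public class KeySetOrderDemo {
    public static void main(String[] args) {
        Map<String, Integer> map = new LinkedHashMap<>();
        map.put("first", 1);
        map.put("second", 2);
        map.put("third", 3);
        // Iteration order matches insertion order: first, second, third
        for (String key : map.keySet()) {
            System.out.println(key + " -> " + map.get(key));
        }
    }
}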
/**
 * A helper utility for generating a signature.
 *
 * @param paramMap the parameters used to generate the signature
 * @return the signature string
 */
public static String sign(LinkedHashMap<String, String> paramMap) {
    if (null == paramMap) {
        throw new RuntimeException("signature parameter can't be null");
    }
    StringBuilder paramStr = new StringBuilder();
    for (String key : paramMap.keySet()) {
        paramStr.append(key);
        paramStr.append(paramMap.get(key));
    }
    try {
        String signatureStr = EncryptUtil.SHA512(paramStr.toString().getBytes(
                ConstantsUnicode.UTF8));
        return signatureStr.toUpperCase();
    } catch (UnsupportedEncodingException e) {
        logger.error(e.getMessage(), e);
    }
    return "";
}
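EncryptUtil.SHA512 above is project-specific. For reference, the hashing step can be written with only the JDK's java.security.MessageDigest; this is a sketch standing in for that helper, not the project's implementation:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class Sha512Sketch {
    // Hash the concatenated key/value string and hex-encode it in uppercase.
    public static String sha512Hex(String input) throws NoSuchAlgorithmException {
        MessageDigest digest = MessageDigest.getInstance("SHA-512");
        byte[] hash = digest.digest(input.getBytes(StandardCharsets.UTF_8));
        StringBuilder hex = new StringBuilder(hash.length * 2);
        for (byte b : hash) {
            hex.append(String.format("%02X", b));
        }
        return hex.toString();
    }
}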
public void registerPatternBlocks(final Map<String, String[]> patternBlocks)
        throws GrokException {
    HashMap<String, String> groupMapping = new HashMap<String, String>();
    LinkedHashMap<String, String> strPatterns = new LinkedHashMap<String, String>();
    for (String group : patternBlocks.keySet()) {
        grokGroups.put(group, new LinkedHashMap<String, Grok>());
        for (String block : patternBlocks.get(group)) {
            for (String line : block.split("\n")) {
                Matcher m = PATTERN_ENTRY.matcher(line);
                if (m.matches()) {
                    strPatterns.put(m.group(1), m.group(2));
                    groupMapping.put(m.group(1), group);
                }
            }
        }
    }
    for (String name : new ArrayList<String>(strPatterns.keySet())) {
        // Could already be compiled via a sub-reference
        if (strPatterns.containsKey(name)) {
            compile(name, strPatterns.get(name), strPatterns, groupMapping);
        }
    }
    logger.info("Registered grok patterns: {}", groks.keySet());
}
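Judging from the two capture groups of PATTERN_ENTRY, the input is a map from group name to blocks of name/pattern lines; the exact line format is an assumption here, since the regex lives elsewhere in the class. A hypothetical input sketch:

import java.util.HashMap;
import java.util.Map;

public class PatternBlocksDemo {
    public static void main(String[] args) {
        // Each group maps to one or more text blocks, one named pattern per line
        // (assumed format: "NAME regex").
        Map<String, String[]> patternBlocks = new HashMap<>();
        patternBlocks.put("base", new String[]{
                "INT \\d+\n"
              + "WORD \\w+"
        });
        // registerPatternBlocks(patternBlocks) would parse each line with
        // PATTERN_ENTRY and register the named patterns in insertion order.
    }
}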
/**
 * Write additional information about input datasets to a file.
 *
 * @param file the file to write the description to
 * @param nameMap mapping from input name to dataset name
 * @throws IOException if the file cannot be written
 */
public static void writeInputDescription(File file, LinkedHashMap<String, String> nameMap) throws IOException {
    try (BufferedWriter writer = new BufferedWriter(new FileWriter(file))) {
        writer.write("# Chipster dataset description file\n");
        writer.write("# \n");
        writer.write("# Additional columns may be added later, so don't assume that there will be only two of them.\n");
        writer.write("# Comment lines are allowed only at the beginning of the file, but the number of them may vary.\n");
        writer.write("# Avoid using dataset names as file names on the server side, although those go through a cursory sanitization.\n");
        writer.write("# \n");
        writer.write("# INPUT_NAME DATASET_NAME\n");
        for (String input : nameMap.keySet()) {
            String name = nameMap.get(input);
            if (name == null) {
                throw new IllegalArgumentException("Dataset name of input '" + input + "' is null. Please rename the dataset.");
            }
            if (!name.matches(NAME_PATTERN)) {
                throw new IllegalArgumentException("Dataset name " + name + " contains illegal characters. Please rename the dataset.");
            }
            writer.write(input + "\t" + name + "\n");
        }
    }
}
/**
 * This function gets all the best time windows from spectra in water and MeOH media, so that they can be
 * analyzed against the yeast media samples for SNR analysis.
 * @param waterAndMeohSpectra a list of maps from ion to its best XZ value
 * @return a map from ion to its list of restricted time windows
 */
public static Map<String, List<Double>> getRestrictedTimeWindowsForIonsFromWaterAndMeOHMedia(
        List<LinkedHashMap<String, XZ>> waterAndMeohSpectra) {
    Map<String, List<Double>> ionToRestrictedTimeWindows = new HashMap<>();
    for (LinkedHashMap<String, XZ> entry : waterAndMeohSpectra) {
        for (String ion : entry.keySet()) {
            List<Double> restrictedTimes = ionToRestrictedTimeWindows.get(ion);
            if (restrictedTimes == null) {
                restrictedTimes = new ArrayList<>();
                ionToRestrictedTimeWindows.put(ion, restrictedTimes);
            }
            Double timeValue = entry.get(ion).getTime();
            restrictedTimes.add(timeValue);
        }
    }
    return ionToRestrictedTimeWindows;
}
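On Java 8+, the get-or-create pattern in the inner loop can be collapsed with Map.computeIfAbsent; a behavior-equivalent sketch of just that step, with a made-up ion name:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ComputeIfAbsentDemo {
    public static void main(String[] args) {
        Map<String, List<Double>> ionToTimes = new HashMap<>();
        // computeIfAbsent creates and stores the list on first access,
        // replacing the explicit null check in the snippet above.
        ionToTimes.computeIfAbsent("M+H", k -> new ArrayList<>()).add(1.25);
        ionToTimes.computeIfAbsent("M+H", k -> new ArrayList<>()).add(2.50);
        System.out.println(ionToTimes); // {M+H=[1.25, 2.5]}
    }
}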
private Map<String, String> getOwnershipMap() {
    Map<String, String> ownershipMap = null;
    try {
        ObjectName mObjNameStorageService = new ObjectName(
                CassandraConstants.ORG_APACHE_CASSANDRA
                        + "db:type="
                        + CassandraConstants.Cassandra_JMX_Attributes.CASSANDRA_JMX_OBJECT_STORAGESERVICE);
        Object attrOwnership = jmxUtil.getAttribute(
                mObjNameStorageService,
                CassandraConstants.Cassandra_JMX_Attributes.CASSANDRA_JMX_ATTRIBUTE_OWNERSHIP);
        LinkedHashMap ownership = (LinkedHashMap) attrOwnership;
        ownershipMap = new HashMap<String, String>();
        for (Object key : ownership.keySet()) {
            ownershipMap.put(
                    key.toString().substring(key.toString().lastIndexOf("/") + 1),
                    df.format(((Float) ownership.get(key)) * 100) + " %");
        }
    } catch (MalformedObjectNameException e) {
        LOG.error("Could not get ownership details.", e);
    }
    return ownershipMap;
}
public static Object getValueFromLinkedMap(LinkedHashMap<String, Object> map, int index) {
    int currentIndex = 0;
    for (String key : map.keySet()) {
        if (currentIndex == index) {
            return map.get(key);
        }
        currentIndex++;
    }
    return null;
}
public static String getKeyFromDataSource(Object[] dataSourceObjects, int index) {
    LinkedHashMap<String, Object> map = (LinkedHashMap<String, Object>) dataSourceObjects[0];
    int currentIndex = 0;
    for (String key : map.keySet()) {
        if (currentIndex == index) {
            return key;
        }
        currentIndex++;
    }
    return null;
}
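Both helpers above walk keySet() to reach the n-th entry; on Java 8+ the same positional lookup can be expressed with streams. A sketch relying on the same insertion-order semantics:

import java.util.LinkedHashMap;

public class PositionalLookupDemo {
    // Returns the key at the given insertion-order position, or null if out of range.
    static String keyAt(LinkedHashMap<String, Object> map, int index) {
        return map.keySet().stream().skip(index).findFirst().orElse(null);
    }

    public static void main(String[] args) {
        LinkedHashMap<String, Object> map = new LinkedHashMap<>();
        map.put("a", 1);
        map.put("b", 2);
        System.out.println(keyAt(map, 1)); // b
    }
}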
@Test(dataProvider = "getRenameCombinations")
public void testRenamingSamples(final Map<String, String> renamingMap, final int threads, final int batchSize) throws IOException {
    final LinkedHashMap<String, String> sampleMap = new LinkedHashMap<>(renamingMap);
    sampleMap.replaceAll((newSampleName, originalSampleName) -> createInputVCF(originalSampleName).getAbsolutePath());
    final File sampleMapFile = getSampleMapFile(sampleMap);
    final String workspace = createTempDir("workspace").getAbsolutePath();
    Files.delete(Paths.get(workspace));
    final ArgumentsBuilder args = new ArgumentsBuilder()
            .add(GenomicsDBImport.SAMPLE_NAME_MAP_LONG_NAME, sampleMapFile.getAbsolutePath())
            .add(GenomicsDBImport.WORKSPACE_ARG_LONG_NAME, new File(workspace).getAbsolutePath())
            .add(GenomicsDBImport.VCF_INITIALIZER_THREADS_LONG_NAME, String.valueOf(threads))
            .add(GenomicsDBImport.BATCHSIZE_ARG_LONG_NAME, String.valueOf(batchSize))
            .addInterval(INTERVAL.get(0));
    runCommandLine(args);
    final Set<String> expectedSampleNames = sampleMap.keySet();
    try (final FeatureReader<VariantContext> reader = getGenomicsDBFeatureReader(workspace, b37_reference_20_21)) {
        final CloseableTribbleIterator<VariantContext> iterator = reader.iterator();
        Assert.assertTrue(iterator.hasNext(), "expected to see a variant");
        Assert.assertTrue(expectedSampleNames.size() > 0);
        Assert.assertEquals(expectedSampleNames.size(), renamingMap.size());
        iterator.forEachRemaining(vc -> {
            Assert.assertEquals(vc.getSampleNames().size(), expectedSampleNames.size());
            Assert.assertEqualsNoOrder(vc.getSampleNames().toArray(), expectedSampleNames.toArray());
            expectedSampleNames.forEach(sample -> {
                Assert.assertEquals(vc.getGenotype(sample).getAnyAttribute(SAMPLE_NAME_KEY), renamingMap.get(sample));
                // check another attribute just to make sure we're not mangling things
                Assert.assertEquals(VariantContextGetters.getAttributeAsInt(vc.getGenotype(sample), ANOTHER_ATTRIBUTE_KEY, -1), 10);
            });
        });
    }
}
public static LinkedHashMap<Integer, LinkedList<String>> cleanMap(LinkedHashMap<Integer, LinkedList<String>> map) {
    LinkedHashMap<Integer, LinkedList<String>> output = new LinkedHashMap<Integer, LinkedList<String>>();
    int i = 0;
    for (Integer c : map.keySet()) {
        if (!map.get(c).isEmpty()) {
            output.put(i, map.get(c));
            i++;
        }
    }
    return output;
}
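When both the key and the value are needed on every iteration, as in cleanMap above, iterating entrySet() avoids the repeated map.get(c) lookups. A behavior-equivalent sketch of the same reindexing logic:

import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.Map;

public class CleanMapDemo {
    static LinkedHashMap<Integer, LinkedList<String>> cleanMap(LinkedHashMap<Integer, LinkedList<String>> map) {
        LinkedHashMap<Integer, LinkedList<String>> output = new LinkedHashMap<>();
        int i = 0;
        // entrySet() yields key and value together, in insertion order.
        for (Map.Entry<Integer, LinkedList<String>> e : map.entrySet()) {
            if (!e.getValue().isEmpty()) {
                output.put(i++, e.getValue());
            }
        }
        return output;
    }
}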
private TableEntry getTableEntry(String tableName, LinkedHashMap<String, String> tableOptions) {
    final TableEntry detector = new TableEntry();
    detector.setTableName(tableName);
    for (String tableHead : tableOptions.keySet()) {
        detector.getTableHead().getRow().addCell(new TableCellEntry(tableHead));
    }
    TableRowEntry row = contructRow(tableOptions.values());
    detector.getTableBody().addRow(row);
    return detector;
}
/**
 * Get a StateModelDefinition without transitions. The auto rebalancer doesn't take transitions
 * into account when computing mappings, so this is acceptable.
 * @param modelName name to give the model
 * @param initialState initial state for all nodes
 * @param states ordered map of state to count
 * @return incomplete StateModelDefinition for rebalancing
 */
private StateModelDefinition getIncompleteStateModelDef(String modelName, String initialState,
        LinkedHashMap<String, Integer> states) {
    StateModelDefinition.Builder builder = new StateModelDefinition.Builder(modelName);
    builder.initialState(initialState);
    int i = states.size();
    for (String state : states.keySet()) {
        builder.addState(state, i);
        builder.upperBound(state, states.get(state));
        i--;
    }
    return builder.build();
}
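The ordering of the states map matters here because keySet() drives the priority countdown (i runs from states.size() down to 1). A JDK-only sketch with a hypothetical MASTER/SLAVE model, showing just the ordering logic without the Helix builder:

import java.util.LinkedHashMap;

public class StatePriorityDemo {
    public static void main(String[] args) {
        // Insertion order defines priority: MASTER gets the highest value.
        LinkedHashMap<String, Integer> states = new LinkedHashMap<>();
        states.put("MASTER", 1);
        states.put("SLAVE", 2);
        int i = states.size();
        for (String state : states.keySet()) {
            System.out.println(state + " priority=" + i + " upperBound=" + states.get(state));
            i--;
        }
    }
}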
/**
 * If any key is not a numeric value, the data is treated as categorical.
 * @param values map whose keys are the observed values
 * @return true if the keys are categorical, false if they are all numeric
 */
private static boolean isCategorical(LinkedHashMap<String, Double> values) {
    try {
        for (String value : values.keySet()) {
            Double.parseDouble(value);
        }
        return false;
    } catch (Exception e) {
        return true;
    }
}
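A quick usage sketch of the idea with hypothetical data; only the parse-or-fail check on the keys matters:

import java.util.LinkedHashMap;

public class IsCategoricalDemo {
    static boolean isCategorical(LinkedHashMap<String, Double> values) {
        try {
            for (String value : values.keySet()) {
                Double.parseDouble(value);
            }
            return false;
        } catch (NumberFormatException e) {
            return true;
        }
    }

    public static void main(String[] args) {
        LinkedHashMap<String, Double> numeric = new LinkedHashMap<>();
        numeric.put("1.5", 10.0);
        numeric.put("2.5", 20.0);

        LinkedHashMap<String, Double> categorical = new LinkedHashMap<>();
        categorical.put("red", 10.0);
        categorical.put("blue", 20.0);

        // "red" fails Double.parseDouble, so the second map is categorical.
        System.out.println(isCategorical(numeric));     // false
        System.out.println(isCategorical(categorical)); // true
    }
}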
/**
 * Import from the standard market data directory.
 */
private void importFromDataDir() throws Exception
{
    File marketData = TraderHomeUtil.getDirectory(TraderHomeUtil.DIR_MARKETDATA);
    File trashDir = TraderHomeUtil.getDirectory(TraderHomeUtil.DIR_TRASH);
    writer.println("Importing from market data directory: " + marketData.getAbsolutePath()); writer.flush();
    for (File tradingDayDir : FileUtil.listSubDirs(marketData)) {
        LocalDate date = DateUtil.str2localdate(tradingDayDir.getName());
        if (date == null) {
            writer.println("Ignoring directory " + tradingDayDir);
            continue;
        }
        writer.print("Importing trading day " + tradingDayDir.getName() + " :"); writer.flush();
        LinkedHashMap<Exchangeable, List<MarketDataInfo>> marketDataInfos = loadMarketDataInfos(tradingDayDir);
        List<Exchangeable> exchangeables = new ArrayList<>(marketDataInfos.keySet());
        Collections.sort(exchangeables);
        for (Exchangeable e : exchangeables) {
            // Find the most suitable file for each instrument
            List<MarketDataInfo> mdInfos = marketDataInfos.get(e);
            Collections.sort(mdInfos);
            // Perform the actual import
            MarketDataInfo mdInfo = mdInfos.get(mdInfos.size() - 1);
            importMarketData(date, mdInfo);
            writer.print(" " + mdInfo.exchangeable + "(" + mdInfo.savedTicks + "/" + mdInfo.tickCount + ")"); writer.flush();
        }
        writer.println();
        // Move the daily directory into the trash directory
        if (moveToTrash) {
            moveToTrash(trashDir, tradingDayDir);
        }
    }
}
/**
 * Create a key including the
 * <li><b>expenditureFinancialDocumentNumber</b></li>
 * <li><b>expenditureFinancialDocumentTypeCode</b></li>
 * with accounting information for asset payment distribution.
 *
 * Make sure the full accounting line information is part of the key:
 * chartOfAccount, accountNumber, subAccountNumber, objectCode, subObjectCode, projectCode
 *
 * @return the asset payment detail key
 */
public String getAssetPaymentDetailKey() {
    LinkedHashMap<String, String> paymentMap = assetPaymentToStringMapper();
    paymentMap.put("expenditureFinancialDocumentTypeCode", this.getExpenditureFinancialDocumentTypeCode());
    paymentMap.put("expenditureFinancialDocumentNumber", this.getExpenditureFinancialDocumentNumber());
    // use SHORT_PREFIX_STYLE so that the memory address is not part of the toString output
    ToStringBuilder builder = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
    for (String key : paymentMap.keySet()) {
        builder.append(key, paymentMap.get(key));
    }
    return builder.toString();
}
/**
 * Creates various extra statements that we need.
 * <p/>
 * One of these is to create a constraint_failure() procedure, since MySQL has
 * no way to abort a transaction other than performing an invalid query
 * (constraint_failure() attempts an "UPDATE" of "fail = 1" on a table named
 * whatever failure message is passed in; that way at least the error appears in
 * the MySQL error message. This sucks, I know. Blame MySQL.)
 * <p/>
 * FIXME: Right now, MySQL <b>does not support</b> updating the same table a
 * trigger applies to in an AFTER trigger, so we have no way to clear shadow
 * columns. This was presumably a result of MySQL incompetence, or MySQL's
 * philosophy of limiting itself in order to prevent idiot PHP programmers from
 * shooting themselves in the foot with--*GASP*--a recursive trigger. While it
 * would be very nice to clear the shadow columns, we simply can't do so: the
 * values are often needed in AFTER INSERT or AFTER UPDATE triggers, so we can't
 * clear them before then; however, the only entry point for clearing these with
 * MySQL is doing a 'SET NEW.col = NULL' inside the BEFORE trigger.
 * <p/>
 * If MySQL some day supports an update on the same table a trigger is applied
 * to, the following will work. Until then, MySQL tables will end up retaining
 * values in the shadow columns. N.B. that these values are actual references to
 * the appropriate tables and columns, but are 'ON DELETE SET NULL ON UPDATE SET
 * NULL', so should not pose data manipulation problems.
 *
 * @param toggleTriggers whether to wrap each trigger body in an @DISABLE_TRIGGER check
 *
 * @return the list of extra statements
 */
// DTRIG CF2012
@Override
public List<String> createExtras(boolean toggleTriggers) {
    final List<String> extras = new LinkedList<>();
    if (addFail) {
        // This hack is needed for MySQL to abort an insert if a trigger fails.
        // This is disgusting, but is what the MySQL documentation recommends doing.
        extras.addAll(delimit("$$",
                "CREATE PROCEDURE constraint_failure(_message VARCHAR(255)) BEGIN" + lineSep
                        + " -- This update is going to fail: this hack is needed because MySQL" + lineSep
                        + " -- lacks the ability to do an (SQL-standard) SIGNAL from a procedure." + lineSep
                        + " SET @sql = CONCAT('UPDATE `', _message, '` SET fail=1');" + lineSep
                        + " PREPARE constraint_fail_statement_handle FROM @sql;" + lineSep
                        + " EXECUTE constraint_fail_statement_handle;" + lineSep
                        + " DEALLOCATE PREPARE constraint_fail_statement_handle;" + lineSep + "END"));
    }
    final List<StringBuilder> trigs = new LinkedList<>();
    for (final EntityNode table : triggers.keySet()) {
        final LinkedHashMap<String, LinkedList<String>> tableTriggers = triggers.get(table);
        for (final String when : tableTriggers.keySet()) {
            final LinkedList<String> actions = tableTriggers.get(when);
            final StringBuilder commands = new StringBuilder(200);
            commands.append("CREATE TRIGGER ")
                    .append(quoteId(table + "_" + when.replaceAll("(\\w)\\w*\\s*", "$1").toLowerCase() + "Trig"))
                    .append(' ').append(when).append(" ON ").append(quoteId(table)).append(" FOR EACH ROW")
                    .append(lineSep);
            if (toggleTriggers) {
                commands.append("\tIF (@DISABLE_TRIGGER IS NULL) THEN ").append(lineSep);
            }
            if (actions.size() == 1) {
                commands.append(toggleTriggers ? "\t\t" : "\t").append(actions.getFirst()).append(";");
            } else {
                commands.append(toggleTriggers ? "\t\t" : "\t").append("BEGIN").append(lineSep);
                for (final String action : actions) {
                    commands.append(toggleTriggers ? "\t\t\t" : "\t\t").append(action.replaceFirst(";\\s*$", ""))
                            .append(';').append(lineSep);
                }
                commands.append(toggleTriggers ? "\t\t" : "\t").append("END;");
            }
            if (toggleTriggers) {
                commands.append(lineSep).append("\tEND IF;").append(lineSep);
            }
            trigs.add(commands);
        }
    }
    if (!trigs.isEmpty()) {
        extras.addAll(delimit("$$", trigs));
    }
    return extras;
}
public static LinkedHashMap<AirMapRule.Status, List<AirMapRule>> getRulesWithFlightFeatures(AirMapRuleset ruleset, AirMapEvaluation evaluation) {
    LinkedHashMap<AirMapRule.Status, List<AirMapRule>> ruleStatusMap = new LinkedHashMap<>();
    // pre-populate status for correct order
    ruleStatusMap.put(AirMapRule.Status.Conflicting, new ArrayList<AirMapRule>());
    ruleStatusMap.put(AirMapRule.Status.MissingInfo, new ArrayList<AirMapRule>());
    ruleStatusMap.put(AirMapRule.Status.InformationRules, new ArrayList<AirMapRule>());
    ruleStatusMap.put(AirMapRule.Status.NotConflicting, new ArrayList<AirMapRule>());
    for (AirMapRule rule : ruleset.getRules()) {
        List<AirMapRule> rules = new ArrayList<>();
        if (ruleStatusMap.containsKey(rule.getStatus())) {
            rules = ruleStatusMap.get(rule.getStatus());
        }
        AirMapRule evaluationRule = getRuleFromEvaluation(evaluation, rule);
        for (AirMapFlightFeature flightFeature : CopyCollections.copy(rule.getFlightFeatures())) {
            Analytics.logDebug("ruleset", ruleset.getId());
            Analytics.logDebug("rule", rule.toString());
            AirMapFlightFeature evaluationFlightFeature = getFlightFeatureFromEvaluation(evaluationRule, flightFeature);
            if (evaluationFlightFeature == null) {
                rule.getFlightFeatures().remove(flightFeature);
                Timber.e("No match found for %s in evaluation", flightFeature.getFlightFeature());
                continue;
            }
            boolean ruleIsFailingDueToInput = rule.getStatus() != AirMapRule.Status.NotConflicting && !evaluationFlightFeature.isCalculated();
            boolean requiresInputBasedOnEvaluation = !evaluationFlightFeature.isCalculated() && evaluationRule.getStatus() != AirMapRule.Status.NotConflicting;
            // replace flight feature with the one from evaluation (includes the question)
            if (ruleIsFailingDueToInput || requiresInputBasedOnEvaluation) {
                rule.getFlightFeatures().remove(flightFeature);
                evaluationFlightFeature.setStatus(flightFeature.getStatus());
                rule.getFlightFeatures().add(evaluationFlightFeature);
            // otherwise hide flight feature
            } else {
                rule.getFlightFeatures().remove(flightFeature);
            }
        }
        rules.add(rule);
        ruleStatusMap.put(rule.getStatus(), rules);
    }
    // strip out empty sections
    for (AirMapRule.Status status : new HashSet<>(ruleStatusMap.keySet())) {
        if (ruleStatusMap.get(status).isEmpty()) {
            ruleStatusMap.remove(status);
        }
    }
    return ruleStatusMap;
}
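The final loop copies the key set just so entries can be removed mid-iteration; on Java 8+ the values() view does this in one line, since removing from a map's collection views removes the corresponding entries. A minimal sketch with made-up section names:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

public class StripEmptySectionsDemo {
    public static void main(String[] args) {
        LinkedHashMap<String, List<String>> sections = new LinkedHashMap<>();
        sections.put("conflicting", new ArrayList<>());
        sections.put("informational", new ArrayList<>());
        sections.get("informational").add("rule-1");
        // Removing from the values() view removes the corresponding map entries.
        sections.values().removeIf(List::isEmpty);
        System.out.println(sections.keySet()); // [informational]
    }
}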
/**
 * Updates the host {@link IVisualPart}'s {@link IContentPart} children (see
 * {@link IVisualPart#getChildrenUnmodifiable()}) so that it is in sync with
 * the set of content children that is passed in.
 *
 * @param parent
 *            The parent {@link IVisualPart} whose content part children to
 *            synchronize against the given content children.
 *
 * @param contentChildren
 *            The list of content part children to be synchronized with the
 *            list of {@link IContentPart} children (
 *            {@link IContentPart#getChildrenUnmodifiable()}).
 *
 * @see IContentPart#getContentChildrenUnmodifiable()
 * @see IContentPart#getChildrenUnmodifiable()
 */
public void synchronizeContentPartChildren(
        IVisualPart<? extends Node> parent,
        final List<? extends Object> contentChildren) {
    if (contentChildren == null) {
        throw new IllegalArgumentException("contentChildren may not be null");
    }
    List<IContentPart<? extends Node>> toRemove = new ArrayList<>();
    Map<IVisualPart<? extends Node>, List<IContentPart<? extends Node>>> removalsPerParent = new LinkedHashMap<>();
    detachAll(parent, Sets.newHashSet(contentChildren), toRemove, removalsPerParent);
    removalsPerParent.forEach((removeFrom, removeUs) -> {
        removeFrom.removeChildren(removeUs);
    });
    for (IContentPart<? extends Node> cp : toRemove) {
        disposeIfObsolete(cp);
    }
    LinkedHashMap<IVisualPart<? extends Node>, HashMultimap<Integer, IContentPart<? extends Node>>> addsPerParent = new LinkedHashMap<>();
    List<IContentPart<? extends Node>> added = Lists.newArrayList();
    List<ReorderData> reorders = Lists.newArrayList();
    addAll(parent, contentChildren, added, addsPerParent, reorders);
    ArrayList<IVisualPart<? extends Node>> parents = new ArrayList<>(addsPerParent.keySet());
    for (int i = parents.size() - 1; i >= 0; i--) {
        IVisualPart<? extends Node> parentContentPart = parents.get(i);
        HashMultimap<Integer, IContentPart<? extends Node>> childContentParts = addsPerParent.get(parentContentPart);
        childContentParts.keySet().forEach(cp -> {
            ArrayList<IContentPart<? extends Node>> children = Lists.newArrayList(childContentParts.get(cp));
            parentContentPart.addChildren(children, cp);
            children.forEach(contentPart -> {
                synchronizeContentPartAnchorages(contentPart,
                        contentPart.getContentAnchoragesUnmodifiable());
            });
        });
    }
    for (ReorderData rd : reorders) {
        rd.parent.reorderChild(rd.child, rd.index);
    }
}
/**
 * e.g. download ftp://ftp.uniprot.org/pub/databases/uniprot/current_release/knowledgebase/complete/uniprot_trembl.fasta.gz
 * and pass in the path to the local copy of the file
 *
 * @param args the first argument must be the path to the FASTA file
 */
public static void main(String[] args) throws Exception {
    int mb = 1024 * 1024;
    // Getting the runtime reference from system
    Runtime runtime = Runtime.getRuntime();
    System.out.println("##### Heap utilization statistics [MB] #####");
    // Print used memory
    System.out.println("Used Memory:"
            + (runtime.totalMemory() - runtime.freeMemory()) / mb);
    // Print free memory
    System.out.println("Free Memory:"
            + runtime.freeMemory() / mb);
    // Print total available memory
    System.out.println("Total Memory:" + runtime.totalMemory() / mb);
    // Print maximum available memory
    System.out.println("Max Memory:" + runtime.maxMemory() / mb);
    if (args.length < 1) {
        System.err.println("First argument needs to be the path to the fasta file");
        return;
    }
    File f = new File(args[0]);
    if (!f.exists()) {
        System.err.println("File does not exist " + args[0]);
        return;
    }
    long timeS = System.currentTimeMillis();
    // automatically uncompress files using InputStreamProvider
    InputStreamProvider isp = new InputStreamProvider();
    InputStream inStream = isp.getInputStream(f);
    FastaReader<ProteinSequence, AminoAcidCompound> fastaReader = new FastaReader<ProteinSequence, AminoAcidCompound>(
            inStream,
            new GenericFastaHeaderParser<ProteinSequence, AminoAcidCompound>(),
            new ProteinSequenceCreator(AminoAcidCompoundSet.getAminoAcidCompoundSet()));
    LinkedHashMap<String, ProteinSequence> b;
    int nrSeq = 0;
    // process the file in chunks of 100 sequences to keep memory bounded
    while ((b = fastaReader.process(100)) != null) {
        for (String key : b.keySet()) {
            nrSeq++;
            System.out.println(nrSeq + " : " + key + " " + b.get(key));
            if (nrSeq % 100000 == 0)
                System.out.println(nrSeq);
        }
    }
    long timeE = System.currentTimeMillis();
    System.out.println("parsed a total of " + nrSeq + " TrEMBL sequences in " + (timeE - timeS) + " ms");
}
private void paintTable(Graphics g) {
    if (kmf == null)
        return;
    KMFigureInfo kmfi = kmf.getKMFigureInfo();
    Graphics2D g2 = (Graphics2D) g;
    g2.setStroke(kmfi.kmStroke);
    SurvFitInfo sfi = kmf.getSurvivalFitInfo();
    LinkedHashMap<String, StrataInfo> sfiHashMap = new LinkedHashMap<String, StrataInfo>();
    if (sfi.isWeighted()) {
        sfiHashMap = sfi.getUnweightedStrataInfoHashMap();
    } else {
        sfiHashMap = sfi.getStrataInfoHashMap();
    }
    if (sfiHashMap.size() == 0)
        return;
    int row = 0;
    int left = kmf.getLeft();
    Font f = g2.getFont();
    Font nf = new Font(f.getName(), Font.BOLD, f.getSize());
    g2.setFont(nf);
    FontMetrics fm = getFontMetrics(nf);
    int index = 0;
    int fontHeight = getFontMetrics(getFont()).getHeight();
    int increment = fontHeight;
    ArrayList<Double> xaxisTimeValues = kmf.getxAxisTimeValues();
    ArrayList<Integer> xAxisTimeCoordinates = kmf.getxAxisTimeCoordinates();
    ArrayList<String> labels = new ArrayList<String>(sfiHashMap.keySet());
    Collections.sort(labels);
    for (String group : labels) {
        row = row + increment;
        g2.setColor(kmfi.getColor(index));
        index++;
        g2.drawLine(15, row - fontHeight / 2, left - 5, row - fontHeight / 2);
        g2.setColor(Color.BLACK);
        StrataInfo si = sfiHashMap.get(group);
        // draw the number-at-risk value under each x-axis tick
        for (int i = 0; i < xaxisTimeValues.size(); i++) {
            Double time = xaxisTimeValues.get(i);
            int xvalue = xAxisTimeCoordinates.get(i);
            Double value = si.getNearestAtRisk(time);
            String nrisk = (value == null) ? "" : String.valueOf(value.intValue());
            if (time == 0.0) {
                g2.drawString(nrisk, xvalue, row);
            } else {
                int w = fm.stringWidth(nrisk);
                g2.drawString(nrisk, xvalue - w / 2, row);
            }
        }
    }
}
@Override
public boolean execute(final CommandSender sender, String label, final String[] args) {
    if (!this.plugin.isEnabled()) return false;
    if (!sender.hasPermission("economyapi.command.topmoney")) {
        sender.sendMessage(TextFormat.RED + "You don't have permission to use this command.");
        return false;
    }
    try {
        final LinkedHashMap<String, Double> money = plugin.getAllMoney();
        final Set<String> players = money.keySet();
        final int page = args.length > 0 ? Math.max(1, Math.min(Integer.parseInt(args[0]), players.size())) : 1;
        new Thread() {
            public void run() {
                List<String> list = new LinkedList<>(money.keySet());
                // sort players by balance, highest first
                Collections.sort(list, new Comparator<String>() {
                    @Override
                    public int compare(String s1, String s2) {
                        double one = money.get(s1);
                        double two = money.get(s2);
                        return one < two ? 1 : one > two ? -1 : 0;
                    }
                });
                StringBuilder output = new StringBuilder();
                output.append(plugin.getMessage("topmoney-tag", new String[]{Integer.toString(page), Integer.toString((players.size() + 6) / 5)}, sender) + "\n");
                int duplicate = 0;
                double prev = -1D;
                for (int n = 0; n < list.size(); n++) {
                    double m = money.get(list.get(n));
                    // players with equal balances share the same rank
                    if (m == prev) duplicate++;
                    else duplicate = 0;
                    prev = m;
                    int current = (int) Math.ceil((double) (n + 1) / 5);
                    if (page == current) {
                        output.append(plugin.getMessage("topmoney-format", new String[]{Integer.toString(n + 1 - duplicate), list.get(n), Double.toString(m)}, sender) + "\n");
                    } else if (page < current) {
                        break;
                    }
                }
                if (sender != null) {
                    // drop the trailing newline before sending
                    sender.sendMessage(output.substring(0, output.length() - 1));
                }
            }
        }.start();
    } catch (NumberFormatException e) {
        sender.sendMessage(TextFormat.RED + "Please provide a number.");
    }
    return true;
}