com.google.common.collect.Table#rowKeySet() source code examples

Listed below are example usages of com.google.common.collect.Table#rowKeySet(). You can also follow each project link to view the full source code on GitHub.
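
For reference, here is a minimal, self-contained sketch of the call itself (the RowKeySetDemo class and the sample data are illustrative, not taken from any of the projects below): rowKeySet() returns a Set view of the table's distinct row keys, and each row can then be read as a Map via row(rowKey).

import java.util.Map;
import java.util.Set;

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

public class RowKeySetDemo {
    public static void main(String[] args) {
        // Row key = city, column key = product, value = stock count (illustrative data).
        Table<String, String, Integer> stock = HashBasedTable.create();
        stock.put("Berlin", "apples", 10);
        stock.put("Berlin", "pears", 4);
        stock.put("Madrid", "apples", 7);

        // rowKeySet() returns a Set view of the distinct row keys.
        Set<String> rows = stock.rowKeySet();
        System.out.println(rows); // e.g. [Berlin, Madrid]

        // The pattern most of the examples below follow: iterate the row keys,
        // then read each row as a Map via row(rowKey).
        for (String rowKey : rows) {
            Map<String, Integer> rowMap = stock.row(rowKey);
            System.out.println(rowKey + " -> " + rowMap);
        }
    }
}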

Example 1  Project: mynlp  File: QuickStringIntTable.java
public QuickStringIntTable(Table<String, String, Integer> table) {
    ArrayList<String> labelList = Lists.newArrayList(table.rowKeySet());

    labelBase = findABase(labelList);
    labelSize = labelBase.length;


    data = new int[labelSize * labelSize];

    for (String rowKey : table.rowKeySet()) {
        for (String colKey : table.columnKeySet()) {
            int rowid = labelBase[rowKey.hashCode() % labelSize];
            int colid = labelBase[colKey.hashCode() % labelSize];

            data[rowid * labelSize + colid] = table.get(rowKey, colKey);
        }
    }
}
 
Example 2  Project: mynlp  File: QuickStringDoubleTable.java
public QuickStringDoubleTable(Table<String, String, Double> table) {
    ArrayList<String> labelList = Lists.newArrayList(table.rowKeySet());

    labelBase = findABase(labelList);
    labelSize = labelBase.length;


    data = new double[labelSize * labelSize];

    for (String rowKey : table.rowKeySet()) {
        for (String colKey : table.columnKeySet()) {
            int rowid = labelBase[rowKey.hashCode() % labelSize];
            int colid = labelBase[colKey.hashCode() % labelSize];

            data[rowid * labelSize + colid] = table.get(rowKey, colKey);
        }
    }
}
 
Example 3  Project: fiat  File: RedisPermissionsRepository.java
@Override
public Map<String, UserPermission> getAllById() {
  Table<String, ResourceType, Response<Map<String, String>>> responseTable = getAllFromRedis();
  if (responseTable == null) {
    return new HashMap<>(0);
  }

  Map<String, UserPermission> allById = new HashMap<>(responseTable.rowKeySet().size());

  RawUserPermission rawUnrestricted = new RawUserPermission(responseTable.row(UNRESTRICTED));
  UserPermission unrestrictedUser = getUserPermission(UNRESTRICTED, rawUnrestricted);
  Set<String> adminSet = getAllAdmins();

  for (String userId : responseTable.rowKeySet()) {
    RawUserPermission rawUser = new RawUserPermission(responseTable.row(userId));
    rawUser.isAdmin = adminSet.contains(userId);
    UserPermission permission = getUserPermission(userId, rawUser);
    allById.put(userId, permission.merge(unrestrictedUser));
  }
  return allById;
}
 
Example 4  Project: sequence-mining  File: SequenceMiningCore.java
/** Pretty printing of sequence probabilities */
public static String probsToString(final Table<Sequence, Integer, Double> probs) {
	final StringBuilder sb = new StringBuilder();
	String prefix = "";
	sb.append("{");
	for (final Sequence seq : probs.rowKeySet()) {
		sb.append(prefix + seq + "=(");
		String prefix2 = "";
		for (final Double prob : probs.row(seq).values()) {
			sb.append(prefix2 + prob);
			prefix2 = ",";
		}
		sb.append(")");
		prefix = ",";
	}
	sb.append("}");
	return sb.toString();
}
 
Example 5
private static LineChart buildChart(Table<Long, Long, BigDecimal> policyConcurrencyValueTable) {
	LineData lineData = new LineData();
	LineChart chart = new LineChart(lineData);
	for (Entry<Long, Map<Long, BigDecimal>> concurrencyKeyedEntry : policyConcurrencyValueTable.columnMap().entrySet()) {
		LineDataset dataset = new LineDataset()
				.setBackgroundColor(Color.TRANSPARENT)
				.setBorderColor(Color.random())
				.setLabel(String.format("%s client(s)", concurrencyKeyedEntry.getKey()))
				.setData(concurrencyKeyedEntry.getValue().values());
		lineData.addDataset(dataset);
	}
	
	for (Long policies : policyConcurrencyValueTable.rowKeySet()) {
		lineData.addLabels(String.format("Policies %s", policies));
	}
	return chart;
}
 
Example 6  Project: kurento-java  File: BrowserTest.java
public void writeCSV(String outputFile, Table<Integer, Integer, String> resultTable)
    throws IOException {
  FileWriter writer = new FileWriter(outputFile);
  for (Integer row : resultTable.rowKeySet()) {
    boolean first = true;
    for (Integer column : resultTable.columnKeySet()) {
      if (!first) {
        writer.append(',');
      }
      String value = resultTable.get(row, column);
      if (value != null) {
        writer.append(value);
      }
      first = false;
    }
    writer.append('\n');
  }
  writer.flush();
  writer.close();
}
 
Example 7  Project: blueflood  File: AstyanaxWriter.java
public void writeMetadata(Table<Locator, String, String> metaTable) throws ConnectionException {
    ColumnFamily cf = CassandraModel.CF_METRICS_METADATA;
    Timer.Context ctx = Instrumentation.getBatchWriteTimerContext(CassandraModel.CF_METRICS_METADATA_NAME);
    MutationBatch batch = keyspace.prepareMutationBatch();

    try {
        for (Locator locator : metaTable.rowKeySet()) {
            Map<String, String> metaRow = metaTable.row(locator);
            ColumnListMutation<String> mutation = batch.withRow(cf, locator);

            for (Map.Entry<String, String> meta : metaRow.entrySet()) {
                mutation.putColumn(meta.getKey(), meta.getValue(), StringMetadataSerializer.get(), null);
            }
        }
        try {
            batch.execute();
        } catch (ConnectionException e) {
            Instrumentation.markWriteError(e);
            log.error("Connection exception persisting metadata", e);
            throw e;
        }
    } finally {
        ctx.stop();
    }
}
 
Example 8  Project: qmq  File: ActionCheckpointSerde.java
@Override
public byte[] toBytes(final ActionCheckpoint state) {
    final StringBuilder data = new StringBuilder();
    data.append(VERSION_V3).append(NEWLINE);
    data.append(state.getOffset()).append(NEWLINE);

    final Table<String, String, ConsumerGroupProgress> progresses = state.getProgresses();
    for (final String subject : progresses.rowKeySet()) {
        final Map<String, ConsumerGroupProgress> groups = progresses.row(subject);
        data.append(SLASH_JOINER.join(subject, groups.size())).append(NEWLINE);

        for (final String group : groups.keySet()) {
            final ConsumerGroupProgress progress = groups.get(group);
            final Map<String, ConsumerProgress> consumers = progress.getConsumers();
            final int consumerCount = consumers == null ? 0 : consumers.size();

            data.append(SLASH_JOINER.join(group, boolean2Short(progress.isBroadcast()), progress.getPull(), consumerCount)).append(NEWLINE);

            if (consumerCount <= 0) {
                continue;
            }

            consumers.values().forEach(consumer -> {
                data.append(SLASH_JOINER.join(consumer.getConsumerId(), consumer.getPull(), consumer.getAck())).append(NEWLINE);
            });
        }
    }
    return data.toString().getBytes(Charsets.UTF_8);
}
 
Example 9
/**
 * Converts a Guava table to a CSV table.
 *
 * @param table                   the table to convert
 * @param csvFormat               the CSV format
 * @param missingValuePlaceholder printed if a value is missing (empty string by default)
 * @param <T>                     cell value type
 * @return the table rendered as CSV
 * @throws IOException if an I/O error occurs while writing the CSV
 */
public static <T> String tableToCsv(Table<String, String, T> table, CSVFormat csvFormat,
        String missingValuePlaceholder)
        throws IOException
{
    StringWriter sw = new StringWriter();
    CSVPrinter printer = new CSVPrinter(sw, csvFormat);

    List<String> firstRow = new ArrayList<>();
    firstRow.add(" ");
    firstRow.addAll(table.columnKeySet());
    printer.printRecord(firstRow);

    for (String rowKey : table.rowKeySet()) {
        printer.print(rowKey);
        for (String columnKey : table.columnKeySet()) {
            T value = table.get(rowKey, columnKey);

            if (value == null) {
                printer.print(missingValuePlaceholder);
            }
            else {
                printer.print(value);
            }
        }
        printer.println();
    }

    printer.close();

    return sw.toString();
}
 
Example 10  Project: cachecloud  File: RedisCenterImpl.java
private void fillAccumulationMap(Map<RedisConstant, Map<String, Object>> infoMap,
                                 Table<RedisConstant, String, Long> table) {
    if (table == null || table.isEmpty()) {
        return;
    }
    Map<String, Object> accMap = infoMap.get(RedisConstant.DIFF);
    if (accMap == null) {
        accMap = new LinkedHashMap<String, Object>();
        infoMap.put(RedisConstant.DIFF, accMap);
    }
    for (RedisConstant constant : table.rowKeySet()) {
        Map<String, Long> rowMap = table.row(constant);
        accMap.putAll(rowMap);
    }
}
 
Example 11  Project: tracecompass  File: TmfAbstractToolTipHandler.java
@Override
public Point computePreferredSize() {
    Table<ToolTipString, ToolTipString, ToolTipString> model = getModel();
    int widestCat = 0;
    int widestKey = 0;
    int widestVal = 0;
    int totalHeight = 0;
    Set<ToolTipString> rowKeySet = model.rowKeySet();
    GC gc = new GC(Display.getDefault());
    for (ToolTipString row : rowKeySet) {
        if (!row.equals(UNCATEGORIZED)) {
            Point catExtent = gc.textExtent(row.toString());
            widestCat = Math.max(widestCat, catExtent.x);
            totalHeight += catExtent.y + 8;
        }
        Set<Entry<ToolTipString, ToolTipString>> entrySet = model.row(row).entrySet();
        for (Entry<ToolTipString, ToolTipString> entry : entrySet) {
            Point keyExtent = gc.textExtent(entry.getKey().toString());
            Point valExtent = gc.textExtent(entry.getValue().toString());
            widestKey = Math.max(widestKey, keyExtent.x);
            widestVal = Math.max(widestVal, valExtent.x);
            totalHeight += Math.max(keyExtent.y, valExtent.y) + 4;
        }
    }
    gc.dispose();
    int w = Math.max(widestCat, widestKey + CELL_PADDING + widestVal) + 2 * CONTENT_MARGIN + 2 * BODY_MARGIN;
    int h = totalHeight + 2 * CONTENT_MARGIN + 2 * BODY_MARGIN;
    Point scrollBarSize = getScrollbarSize(getParent());
    return new Point(w + scrollBarSize.x, h);
}
 
Example 12  Project: tracecompass  File: TmfAbstractToolTipHandler.java
@Override
public Point create() {
    Composite parent = getParent();
    Table<ToolTipString, ToolTipString, ToolTipString> model = getModel();
    if (parent == null || model.size() == 0) {
        // avoid displaying empty tool tips.
        return null;
    }
    setupControl(parent);
    ScrolledComposite scrolledComposite = new ScrolledComposite(parent, SWT.H_SCROLL | SWT.V_SCROLL);
    scrolledComposite.setExpandVertical(true);
    scrolledComposite.setExpandHorizontal(true);
    setupControl(scrolledComposite);

    Composite composite = new Composite(scrolledComposite, SWT.NONE);
    fComposite = composite;
    composite.setLayout(new GridLayout(3, false));
    setupControl(composite);
    Set<ToolTipString> rowKeySet = model.rowKeySet();
    for (ToolTipString row : rowKeySet) {
        Set<Entry<ToolTipString, ToolTipString>> entrySet = model.row(row).entrySet();
        for (Entry<ToolTipString, ToolTipString> entry : entrySet) {
            Label nameLabel = new Label(composite, SWT.NO_FOCUS);
            nameLabel.setText(entry.getKey().toString());
            setupControl(nameLabel);
            Label separator = new Label(composite, SWT.NO_FOCUS | SWT.SEPARATOR | SWT.VERTICAL);
            GridData gd = new GridData(SWT.CENTER, SWT.CENTER, false, false);
            gd.heightHint = nameLabel.computeSize(SWT.DEFAULT, SWT.DEFAULT).y;
            separator.setLayoutData(gd);
            setupControl(separator);
            Label valueLabel = new Label(composite, SWT.NO_FOCUS);
            valueLabel.setText(entry.getValue().toString());
            setupControl(valueLabel);
        }
    }
    scrolledComposite.setContent(composite);
    Point preferredSize = computePreferredSize();
    scrolledComposite.setMinSize(preferredSize.x, preferredSize.y);
    return preferredSize;
}
 
Example 13  Project: sequence-mining  File: Transaction.java
public void initializeCachedSequences(final Table<Sequence, Integer, Double> initProbs) {
	final Table<Sequence, Integer, Double> probs = HashBasedTable.create();
	for (final Sequence seq : initProbs.rowKeySet()) {
		if (this.contains(seq))
			probs.row(seq).putAll(initProbs.row(seq));
	}
	cachedSequences = probs;
}
 
Example 14
/**
 * Parses the ini file and returns a map with all of its data.
 *
 * @param resourcePath
 *        The path of the input file
 * @param conf
 *        The configuration info
 * @return the Sentry mapping data as a map structure
 */
@Override
public Map<String, Map<String, Set<String>>> parse(String resourcePath, Configuration conf)
    throws Exception {
  Map<String, Map<String, Set<String>>> resultMap = Maps.newHashMap();
  // SimpleFileProviderBackend is used to parse the ini file
  SimpleFileProviderBackend policyFileBackend = new SimpleFileProviderBackend(conf, resourcePath);
  ProviderBackendContext context = new ProviderBackendContext();
  context.setAllowPerDatabase(true);
  // parse the ini file
  policyFileBackend.initialize(context);

  // SimpleFileProviderBackend parsed the input file and output the data in Table format.
  Table<String, String, Set<String>> groupRolePrivilegeTable = policyFileBackend
      .getGroupRolePrivilegeTable();
  Map<String, Set<String>> groupRolesMap = Maps.newHashMap();
  Map<String, Set<String>> rolePrivilegesMap = Maps.newHashMap();
  for (String groupName : groupRolePrivilegeTable.rowKeySet()) {
    for (String roleName : groupRolePrivilegeTable.columnKeySet()) {
      // get the roles set for the current groupName
      Set<String> tempRoles = groupRolesMap.get(groupName);
      if (tempRoles == null) {
        tempRoles = Sets.newHashSet();
      }
      Set<String> privileges = groupRolePrivilegeTable.get(groupName, roleName);
      // check whether a privilege exists for [group, role]; if none exists, the [group, role]
      // info will be discarded.
      if (privileges != null) {
        // update [group, role] mapping data
        tempRoles.add(roleName);
        groupRolesMap.put(groupName, tempRoles);
        // update [role, privilege] mapping data
        rolePrivilegesMap.put(roleName, privileges);
      }
    }
  }
  resultMap.put(PolicyFileConstants.GROUPS, groupRolesMap);
  resultMap.put(PolicyFileConstants.ROLES, rolePrivilegesMap);
  return resultMap;
}
 
Example 15  Project: tutorials  File: GuavaTableUnitTest.java
@Test
public void givenTable_whenRowKeySet_returnsSuccessfully() {
    final Table<String, String, Integer> universityCourseSeatTable = HashBasedTable.create();
    universityCourseSeatTable.put("Mumbai", "Chemical", 120);
    universityCourseSeatTable.put("Mumbai", "IT", 60);
    universityCourseSeatTable.put("Harvard", "Electrical", 60);
    universityCourseSeatTable.put("Harvard", "IT", 120);

    final Set<String> universitySet = universityCourseSeatTable.rowKeySet();

    assertThat(universitySet).hasSize(2);
}
 
Example 16  Project: blueflood  File: AMetadataIOIntegrationTest.java
@Test
public void testBatchedMetaWritesAndReads() throws Exception {
    final AMetadataIO metadataIO = new AMetadataIO();
    Table<Locator, String, String> metaTable = HashBasedTable.create();
    final Set<Locator> locators = new HashSet<Locator>();
    MetadataCache cache = MetadataCache.getInstance();

    for (int i = 0; i < 10; i++) {
        Locator loc = Locator.createLocatorFromDbKey(
                "12345.rackspace.monitoring.enities.enFoo.check_type.agent.cpu.check.chBar.metric.met" + i);
        locators.add(loc);
        metaTable.put(loc, "key", "value");
    }

    metadataIO.putAll(metaTable); // Writes batch to disk

    Thread.sleep(2000); // wait 2s for batch timer to kick in.

    // Read it back.
    Table<Locator, String, String> metaRead = AstyanaxReader.getInstance().getMetadataValues(locators);

    // Assert that we wrote meta for 10 different locators.
    Assert.assertTrue(metaRead.size() == 10);

    for (Locator locator : metaRead.rowKeySet()) {
        Map<String, String> metaMapForLocator = metaRead.row(locator);

        Assert.assertTrue(metaMapForLocator.size() == 1);
        Assert.assertTrue(metaMapForLocator.get("key").equals("value"));
    }
}
 
Example 17  Project: levelup-java-examples  File: TableExample.java
@Test
public void guava_table_example () {
	
	Random r = new Random(3000);
	
	Table<Integer, String, Workout> table = HashBasedTable.create();
	table.put(1, "Filthy 50", new Workout(r.nextLong()));
	table.put(1, "Fran", new Workout(r.nextLong()));
	table.put(1, "The Seven", new Workout(r.nextLong()));
	table.put(1, "Murph", new Workout(r.nextLong()));
	table.put(1, "The Ryan", new Workout(r.nextLong()));
	table.put(1, "King Kong", new Workout(r.nextLong()));
	
	table.put(2, "Filthy 50", new Workout(r.nextLong()));
	table.put(2, "Fran", new Workout(r.nextLong()));
	table.put(2, "The Seven", new Workout(r.nextLong()));
	table.put(2, "Murph", new Workout(r.nextLong()));
	table.put(2, "The Ryan", new Workout(r.nextLong()));
	table.put(2, "King Kong", new Workout(r.nextLong()));
	
	// for each row key
	for (Integer key : table.rowKeySet()) {
		
		logger.info("Person: " + key);

		for (Entry<String, Workout> row : table.row(key).entrySet()) {
			logger.info("Workout name: " + row.getKey() + " for elapsed time of " + row.getValue().getElapsedTime());
		}
	}
}
 
Example 18  Project: synthea  File: DataStore.java
/**
 * Store a collection of Providers and their related information into this data store.
 * @param providers - collection of Providers to store.
 * @return whether the entire collection of Providers was stored
 *     successfully (true) or not (false).
 */
public boolean store(Collection<? extends Provider> providers) {
  try (Connection connection = getConnection()) {
    // CREATE TABLE IF NOT EXISTS PROVIDER (id varchar, name varchar)
    PreparedStatement providerTable = connection
        .prepareStatement("INSERT INTO PROVIDER (id, name) VALUES (?,?);");

    // create table provider_attribute (provider_id varchar, name varchar, value varchar)
    PreparedStatement attributeTable = connection.prepareStatement(
        "INSERT INTO PROVIDER_ATTRIBUTE (provider_id, name, value) VALUES (?,?,?);");

    // CREATE TABLE IF NOT EXISTS UTILIZATION (provider_id varchar, encounters int, procedures
    // int, labs int, prescriptions int)
    PreparedStatement utilizationTable = connection.prepareStatement(
        "INSERT INTO UTILIZATION "
        + "(provider_id, year, encounters, procedures, labs, prescriptions) "
        + "VALUES (?,?,?,?,?,?)");

    // CREATE TABLE IF NOT EXISTS UTILIZATION_DETAIL (provider_id varchar, year int, category
    // string, value int)
    PreparedStatement utilizationDetailTable = connection.prepareStatement(
        "INSERT INTO UTILIZATION_DETAIL (provider_id, year, category, value) VALUES (?,?,?,?)");
    for (Provider p : providers) {
      String providerID = p.getResourceID();
      Map<String, Object> attributes = p.getAttributes();

      providerTable.setString(1, providerID);
      providerTable.setString(2, (String) attributes.get("name"));
      providerTable.addBatch();

      for (Object key : attributes.keySet()) {
        attributeTable.setString(1, providerID);
        attributeTable.setString(2, (String) key);
        attributeTable.setString(3, String.valueOf(attributes.get(key)));
        attributeTable.addBatch();
      }

      Table<Integer, String, AtomicInteger> u = p.getUtilization();
      for (Integer year : u.rowKeySet()) {
        utilizationTable.setString(1, providerID);
        utilizationTable.setInt(2, year);
        utilizationTable.setInt(3, pickUtilization(u, year, Provider.ENCOUNTERS));
        utilizationTable.setInt(4, pickUtilization(u, year, Provider.PROCEDURES));
        utilizationTable.setInt(5, pickUtilization(u, year, Provider.LABS));
        utilizationTable.setInt(6, pickUtilization(u, year, Provider.PRESCRIPTIONS));
        utilizationTable.addBatch();

        for (String category : u.columnKeySet()) {
          if (!category.startsWith(Provider.ENCOUNTERS)) {
            continue;
          }

          int count = pickUtilization(u, year, category);

          if (count == 0) {
            // don't bother storing 0 in the database
            continue;
          }

          utilizationDetailTable.setString(1, providerID);
          utilizationDetailTable.setInt(2, year);
          utilizationDetailTable.setString(3, category);
          utilizationDetailTable.setInt(4, count);
          utilizationDetailTable.addBatch();
        }
      }
    }

    for (int year = 1900; year <= Utilities.getYear(System.currentTimeMillis()); year++) {
      for (int t = 0; t < EncounterType.values().length; t++) {
        utilizationDetailTable.setString(1, "None");
        utilizationDetailTable.setInt(2, year);
        utilizationDetailTable.setString(3, EncounterType.values()[t].toString());
        utilizationDetailTable.setInt(4, 0);
        utilizationDetailTable.addBatch();
      }
    }

    providerTable.executeBatch();
    attributeTable.executeBatch();
    utilizationTable.executeBatch();
    utilizationDetailTable.executeBatch();
    connection.commit();
    return true;
  } catch (SQLException e) {
    e.printStackTrace();
    return false;
  }
}
 
Example 19  Project: tracecompass  File: TmfAbstractToolTipHandler.java
        @SuppressWarnings("nls")
        private String toHtml() {
            GC gc = new GC(Display.getDefault());
            FontData fontData = gc.getFont().getFontData()[0];
            String fontName = fontData.getName();
            String fontHeight = fontData.getHeight() + "pt";
            gc.dispose();
            Table<ToolTipString, ToolTipString, ToolTipString> model = getModel();
            StringBuilder toolTipContent = new StringBuilder();
            toolTipContent.append("<head>\n" +
                    "<meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n" +
                    "<style>\n" +
                    ".collapsible {\n" +
                    "  background-color: #777;\n" +
                    "  color: white;\n" +
//                    "  cursor: pointer;\n" + // Add when enabling JavaScript
                    "  padding: 0px;\n" +
                    "  width: 100%;\n" +
                    "  border: none;\n" +
                    "  text-align: left;\n" +
                    "  outline: none;\n" +
                    "  font-family: " + fontName +";\n" +
                    "  font-size: " + fontHeight + ";\n" +
                    "}\n" +
                    "\n" +
                    ".active, .collapsible:hover {\n" +
                    "  background-color: #555;\n" +
                    "}\n" +
                    "\n" +
                    ".content {\n" +
                    "  padding: 0px 0px;\n" +
                    "  display: block;\n" +
                    "  overflow: hidden;\n" +
                    "  background-color: #f1f1f1;\n" +
                    "}\n" +
                    ".tab {\n" +
                    "  padding:0px;\n" +
                    "  font-family: " + fontName + ";\n" +
                    "  font-size: " + fontHeight + ";\n" +
                    "}\n" +
                    ".leftPadding {\n" +
                    "  padding:0px 0px 0px " + CELL_PADDING + "px;\n" +
                    "}\n" +
                    ".bodystyle {\n" +
                    "  margin:" + BODY_MARGIN + "px;\n" +
                    "  padding:0px 0px;\n" +
                    "}\n" +
                    "</style>\n" +
                    "</head>");
            toolTipContent.append("<body class=\"bodystyle\">"); //$NON-NLS-1$

            toolTipContent.append("<div class=\"content\">");
            toolTipContent.append("<table class=\"tab\">");
            Set<ToolTipString> rowKeySet = model.rowKeySet();
            for (ToolTipString row : rowKeySet) {
                if (!row.equals(UNCATEGORIZED)) {
                    toolTipContent.append("<tr><th colspan=\"2\"><button class=\"collapsible\">").append(row.toHtmlString()).append("</button></th></tr>");
                }
                Set<Entry<ToolTipString, ToolTipString>> entrySet = model.row(row).entrySet();
                for (Entry<ToolTipString, ToolTipString> entry : entrySet) {
                    toolTipContent.append("<tr>");
                    toolTipContent.append("<td>");
                    toolTipContent.append(entry.getKey().toHtmlString());
                    toolTipContent.append("</td>");
                    toolTipContent.append("<td class=\"leftPadding\">");
                    toolTipContent.append(entry.getValue().toHtmlString());
                    toolTipContent.append("</td>");
                    toolTipContent.append("</tr>");
                }
            }
            toolTipContent.append("</table></div>");
            /* Add when enabling JavaScript
            toolTipContent.append("\n" +
                    "<script>\n" +
                    "var coll = document.getElementsByClassName(\"collapsible\");\n" +
                    "var i;\n" +
                    "\n" +
                    "for (i = 0; i < coll.length; i++) {\n" +
                    "  coll[i].addEventListener(\"click\", function() {\n" +
                    "    this.classList.toggle(\"active\");\n" +
                    "    var content = this.nextElementSibling;\n" +
                    "    if (content.style.display === \"block\") {\n" +
                    "      content.style.display = \"none\";\n" +
                    "    } else {\n" +
                    "      content.style.display = \"block\";\n" +
                    "    }\n" +
                    "  });\n" +
                    "}\n" +
                    "</script>");
            */
            toolTipContent.append("</body>"); //$NON-NLS-1$
            return toolTipContent.toString();
        }
 
Example 20  Project: sequence-mining  File: IntervalClassification.java
public static void main(final String[] args) throws IOException {

		final String[] datasets = new String[] { "context", "auslan2", "pioneer", "aslbu", "skating", "aslgt" };
		final int[] topNs = new int[] { 10, 40, 70, 100 };
		final String baseFolder = "/afs/inf.ed.ac.uk/user/j/jfowkes/Code/Sequences/";
		final String datasetFolder = baseFolder + "Datasets/Intervals/";
		final String outFolder = baseFolder + "Classification/";

		for (int i = 0; i < datasets.length; i++) {
			final String dataset = datasets[i];

			System.out.println("===== Dataset: " + dataset + " =====");
			final File outFile = new File(outFolder + dataset + ".txt");
			final Writer writer = Files.newWriter(outFile, Charsets.UTF_8);
			writer.write("===== " + dataset + " =====\n");
			writer.write("topN: " + Arrays.toString(topNs) + "\n");

			// Read dataset
			final File dbFile = new File(datasetFolder + dataset + "/" + dataset + ".dat");
			final TransactionList dbTrans = SequenceMining.readTransactions(dbFile);
			final File labelFile = new File(datasetFolder + dataset + "/" + dataset + ".lab");

			// Read SQS seqs
			final File outSQS = new File(baseFolder + "SQS/" + dataset + ".txt");
			final Map<Sequence, Double> seqsSQS = StatisticalSequenceMining.readSQSSequences(outSQS);
			// seqsSQS = removeSingletons(seqsSQS);
			System.out.println("SQS: " + seqsSQS);
			writer.write(seqsSQS.size() + " SQS seqs \n");

			// Read GOKRIMP seqs
			final File outGOKRIMP = new File(baseFolder + "GoKrimp/" + dataset + ".txt");
			final Map<Sequence, Double> seqsGOKRIMP = StatisticalSequenceMining.readGoKrimpSequences(outGOKRIMP);
			// seqsGOKRIMP = removeSingletons(seqsGOKRIMP);
			System.out.println("GoKrimp: " + seqsGOKRIMP);
			writer.write(seqsGOKRIMP.size() + " GoKrimp seqs \n");

			// Read ISM seqs
			final File outISM = new File(baseFolder + "Logs/" + dataset + ".log");
			final Map<Sequence, Double> seqsISM = SequenceMining.readISMSequences(outISM);
			System.out.println("ISM: " + seqsISM);
			writer.write(seqsISM.size() + " ISM seqs \n");

			// Read BIDE seqs
			final File outBIDE = new File(baseFolder + "BIDE/" + dataset + ".txt");
			final Map<Sequence, Integer> seqsBIDE = FrequentSequenceMining.readFrequentSequences(outBIDE);
			// seqsBIDE = removeSingletons(seqsBIDE);
			System.out.println("BIDE: " + seqsBIDE);
			writer.write(seqsBIDE.size() + " BIDE seqs \n");

			// Generate simple features
			Map<Sequence, Double> seqsSingleton = new HashMap<>();
			final Table<Sequence, Integer, Double> singletons = SequenceMining
					.scanDatabaseToDetermineInitialProbabilities(dbFile);
			for (final Sequence seq : singletons.rowKeySet())
				seqsSingleton.put(seq, 1 - singletons.get(seq, 0));
			// Sort by support
			final Ordering<Sequence> comparator = Ordering.natural().reverse()
					.onResultOf(Functions.forMap(seqsSingleton)).compound(Ordering.usingToString());
			seqsSingleton = ImmutableSortedMap.copyOf(seqsSingleton, comparator);
			System.out.println("Singeltons: " + seqsSingleton);
			writer.write(seqsSingleton.size() + " Singletons seqs \n");

			// Classify
			final Multimap<String, Double> accuracy = ArrayListMultimap.create();
			for (final int n : topNs) {
				// Run MALLET Naive Bayes classifier
				accuracy.put("SQS", classify(n, seqsSQS, dbTrans, labelFile));
				accuracy.put("GoKrimp", classify(n, seqsGOKRIMP, dbTrans, labelFile));
				accuracy.put("ISM", classify(n, seqsISM, dbTrans, labelFile));
				accuracy.put("BIDE", classify(n, seqsBIDE, dbTrans, labelFile));
				accuracy.put("Singletons", classify(n, seqsSingleton, dbTrans, labelFile));
				// Run libSVM Linear classifier
				accuracy.put("SQS_SVM", classifySVM(n, seqsSQS, dbTrans, labelFile));
				accuracy.put("GoKrimp_SVM", classifySVM(n, seqsGOKRIMP, dbTrans, labelFile));
				accuracy.put("ISM_SVM", classifySVM(n, seqsISM, dbTrans, labelFile));
				accuracy.put("BIDE_SVM", classifySVM(n, seqsBIDE, dbTrans, labelFile));
				accuracy.put("Singletons_SVM", classifySVM(n, seqsSingleton, dbTrans, labelFile));
			}
			for (final String alg : accuracy.keySet())
				writer.write(alg + ": " + accuracy.get(alg) + "\n");
			writer.close();
		}
	}