下面列出了 com.google.common.collect.TreeBasedTable#create() 的实例代码；也可以点击链接到 GitHub 查看源代码，或在右侧发表评论。
/**
 * Converts per-unit worker assignments into a sorted table:
 * row = unit ID, column = worker ID, cell = that worker's label (as a string).
 */
public static Table<String, String, String> assignmentsToTable(
        SortedMap<String, SortedSet<SingleWorkerAssignment<Step2bGoldReasonAnnotator.SentenceLabel>>> assignments)
{
    TreeBasedTable<String, String, String> result = TreeBasedTable.create();
    for (Map.Entry<String, SortedSet<SingleWorkerAssignment<Step2bGoldReasonAnnotator.SentenceLabel>>> entry
            : assignments.entrySet()) {
        String unitID = entry.getKey();
        for (SingleWorkerAssignment<Step2bGoldReasonAnnotator.SentenceLabel> assignment : entry.getValue()) {
            // one cell per (unit, worker) pair
            result.put(unitID, assignment.getWorkerID(), assignment.getLabel().toString());
        }
    }
    return result;
}
/**
 * Loads a tab-separated statistics dump into a sorted table.
 * Expected line format: {@code language<TAB>license<TAB>documents<TAB>tokens}.
 * Produces two columns per license: {@code "docs <license>"} and {@code "tokens <license>"}.
 *
 * @param stream UTF-8 encoded input; not closed by this method
 * @return table keyed by (language, per-license column name)
 * @throws IOException if reading fails or a line does not have 4 tab-separated fields
 */
public static Table<String, String, Long> loadTable(InputStream stream)
    throws IOException
{
    Table<String, String, Long> result = TreeBasedTable.create();
    LineIterator lineIterator = IOUtils.lineIterator(stream, "utf-8");
    try {
        while (lineIterator.hasNext()) {
            String line = lineIterator.next();
            String[] split = line.split("\t");
            // Fail fast with context instead of an ArrayIndexOutOfBoundsException
            if (split.length < 4) {
                throw new IOException(
                        "Expected 4 tab-separated fields but found " + split.length + " in line: " + line);
            }
            String language = split[0];
            String license = split[1];
            Long documents = Long.valueOf(split[2]);
            Long tokens = Long.valueOf(split[3]);
            result.put(language, "docs " + license, documents);
            result.put(language, "tokens " + license, tokens);
        }
    }
    finally {
        // release the iterator's underlying reader (removed stray debug println as well)
        lineIterator.close();
    }
    return result;
}
/**
 * Reads the gold SRL parses for one Penn Treebank WSJ section.
 * Propbank files skip sentences without SRL dependencies, so every sentence is
 * first seeded with an empty parse, then overwritten by the real SRL parses.
 */
private static Collection<SRLParse> getPropbankSection(final String section) throws IOException {
    final Table<String, Integer, TreebankParse> treebank = new PennTreebank().readCorpus(WSJ);
    final Table<String, Integer, SRLParse> srlParses = SRLParse.parseCorpus(treebank,
            Util.readFileLineByLine(new File(PROPBANK, "prop.txt")),
            USING_NOMBANK ? Util.readFileLineByLine(NOMBANK) : null);
    final Table<String, Integer, SRLParse> goldParses = TreeBasedTable.create();
    // default: an empty parse for every sentence in the treebank
    for (final Cell<String, Integer, TreebankParse> cell : treebank.cellSet()) {
        goldParses.put(cell.getRowKey(), cell.getColumnKey(), new SRLParse(cell.getValue().getWords()));
    }
    // overwrite the defaults wherever a real SRL parse exists
    goldParses.putAll(srlParses);
    // keep only the requested section, identified by its file-name prefix
    final String sectionPrefix = "wsj_" + section;
    final Collection<SRLParse> result = new ArrayList<>();
    for (final Cell<String, Integer, SRLParse> cell : goldParses.cellSet()) {
        if (cell.getRowKey().startsWith(sectionPrefix)) {
            result.add(cell.getValue());
        }
    }
    return result;
}
/**
 * Collects the library sources actually required by the main sources.
 * May only be called during the SOURCE_PRE_LINKAGE phase.
 */
private Set<SourceSpecificContext> getRequiredSourcesFromLib() {
    checkState(currentPhase == ModelProcessingPhase.SOURCE_PRE_LINKAGE,
        "Required library sources can be collected only in ModelProcessingPhase.SOURCE_PRE_LINKAGE phase,"
            + " but current phase was %s", currentPhase);
    // Index all library sources by (module name, revision); both dimensions kept sorted.
    final TreeBasedTable<String, Optional<Revision>, SourceSpecificContext> libSourcesTable =
        TreeBasedTable.create(String::compareTo, Revision::compare);
    for (final SourceSpecificContext libSource : libSources) {
        final SourceIdentifier identifier = requireNonNull(libSource.getRootIdentifier());
        libSourcesTable.put(identifier.getName(), identifier.getRevision(), libSource);
    }
    // Resolve requirements per main source, dropping conflicting library sources as we go.
    final Set<SourceSpecificContext> requiredLibs = new HashSet<>();
    for (final SourceSpecificContext source : sources) {
        collectRequiredSourcesFromLib(libSourcesTable, requiredLibs, source);
        removeConflictingLibSources(source, requiredLibs);
    }
    return requiredLibs;
}
@Before
public void setUp() throws Exception {
// Fresh fixtures for each test: empty row metadata, an empty sorted
// (row, column) -> value table, and a mutable copy of the defaults so
// individual tests can modify aggregates without affecting each other.
rowMeta = new RowMeta();
data = TreeBasedTable.create();
variables = new Variables();
aggregates = Maps.newHashMap( default_aggregates );
}
@Before
public void setUp() throws Exception {
// Fresh fixtures for each test: empty row metadata, an empty sorted
// (row, column) -> value table, and a mutable copy of the defaults so
// individual tests can modify aggregates without affecting each other.
rowMeta = new RowMeta();
data = TreeBasedTable.create();
variables = new Variables();
aggregates = Maps.newHashMap( default_aggregates );
}
/**
 * Crowd-size vs. MACE-threshold agreement study: splits the 18 worker assignments
 * into two disjoint "virtual crowds", annotates each, and accumulates the kappa
 * between them per (crowd size, threshold) cell over 20 seeded repeats.
 */
public static void main(String[] args)
        throws Exception
{
    final File csvFile = new File(
            "mturk/annotation-task/21-pilot-stance-task.output.csv");
    final File argumentsFile = new File(
            "mturk/annotation-task/data/arguments-with-full-segmentation-rfd.xml.gz");
    // row = crowd size, column = MACE threshold, cell = kappa samples across repeats
    TreeBasedTable<Integer, Double, DescriptiveStatistics> table = TreeBasedTable.create();
    for (int crowdSize = 1; crowdSize <= 9; crowdSize++) {
        for (Double maceThreshold : Arrays.asList(0.85, 0.9, 0.95, 1.0)) {
            // 20 random repeats, seeded by the repeat index for reproducibility
            // (the original comment claimed "ten" — the loop runs 20)
            for (int i = 0; i < 20; i++) {
                Random random = new Random(i);
                File crowdExpert1 = File.createTempFile("crowd1", ".xml.gz");
                File crowdExpert2 = File.createTempFile("crowd2", ".xml.gz");
                try {
                    annotateWithGoldLabels(csvFile, argumentsFile, crowdExpert1, maceThreshold,
                            new WorkerAssignmentFilterRandomized(18, 1, crowdSize, random));
                    annotateWithGoldLabels(csvFile, argumentsFile, crowdExpert2, maceThreshold,
                            new WorkerAssignmentFilterRandomized(18, 2, crowdSize, random));
                    double kappa = computeKappa(crowdExpert1, crowdExpert2);
                    if (!table.contains(crowdSize, maceThreshold)) {
                        table.put(crowdSize, maceThreshold, new DescriptiveStatistics());
                    }
                    table.get(crowdSize, maceThreshold).addValue(kappa);
                }
                finally {
                    // delete the temp files even when annotation or kappa computation throws
                    FileUtils.forceDelete(crowdExpert1);
                    FileUtils.forceDelete(crowdExpert2);
                }
            }
        }
    }
    printTable(table);
}
/**
 * Creates an empty statistics table with rows sorted by file path (natural order)
 * and columns sorted by candidate-unit name.
 */
private static TreeBasedTable<File, CandidateUnit, Integer> createStatisticsTable() {
    return TreeBasedTable.create(
            Comparator.naturalOrder(),
            // idiomatic replacement for the hand-rolled name1.compareTo(name2) lambda
            Comparator.comparing(CandidateUnit::getName));
}
/**
 * Parses the semicolon-separated performance output file into a
 * (policies, concurrency) -> average-value table. The first line is a header.
 */
private static Table<Long, Long, BigDecimal> parsePerformanceTable() throws IOException {
    Table<Long, Long, BigDecimal> measurements = TreeBasedTable.create();
    Splitter fieldSplitter = Splitter.on(";");
    List<String> lines = Files.readLines(outputFile(), Charsets.UTF_8);
    // start at index 1: line 0 is the column header
    for (int lineNo = 1; lineNo < lines.size(); lineNo++) {
        Iterable<String> fields = fieldSplitter.split(lines.get(lineNo));
        measurements.put(
                Long.valueOf(get(fields, 0)),
                Long.valueOf(get(fields, 1)),
                new BigDecimal(get(fields, 2)));
    }
    return measurements;
}
@Test
public void givenTreeTable_whenGet_returnsSuccessfully() {
    // TreeBasedTable keeps rows and columns sorted, so insertion order is irrelevant to get().
    final Table<String, String, Integer> universityCourseSeatTable = TreeBasedTable.create();
    universityCourseSeatTable.put("Harvard", "Electrical", 60);
    universityCourseSeatTable.put("Harvard", "IT", 120);
    universityCourseSeatTable.put("Mumbai", "Chemical", 120);
    universityCourseSeatTable.put("Mumbai", "IT", 60);
    // look up one (row, column) cell and verify its value
    final int seatCount = universityCourseSeatTable.get("Mumbai", "IT");
    assertThat(seatCount).isEqualTo(60);
}
@Before
public void setUp() throws Exception {
// Fresh fixtures for each test: empty row metadata, an empty sorted
// (row, column) -> value table, and a mutable copy of the defaults so
// individual tests can modify aggregates without affecting each other.
rowMeta = new RowMeta();
data = TreeBasedTable.create();
variables = new Variables();
aggregates = Maps.newHashMap( default_aggregates );
}
// Crowd-size vs. MACE-threshold study for the reasons pilot: splits the 18 worker
// assignments into two disjoint "virtual crowds", annotates both, and accumulates
// (a) inter-crowd kappa and (b) the number of annotated instances per
// (crowd size, threshold) cell. Runs crowd sizes and thresholds in parallel;
// the shared tables are guarded by synchronized blocks.
public static void main(String[] args)
throws Exception
{
final File csvFile = new File(
"mturk/annotation-task/31-pilot-reasons-task.output.csv");
final File argumentsFile = new File(
"mturk/annotation-task/data/arguments-with-full-segmentation-rfd.xml.gz");
// row = crowd size, column = MACE threshold; cells collect samples across repeats
TreeBasedTable<Integer, Double, DescriptiveStatistics> table = TreeBasedTable.create();
TreeBasedTable<Integer, Double, DescriptiveStatistics> tableAnnotatedArguments = TreeBasedTable
.create();
IntStream.range(1, 10).parallel().forEach(crowdSize -> {
// Arrays.asList(0.85, 0.9, 0.95, 1.0).parallelStream().forEach(maceThreshold -> {
Arrays.asList(0.94, 0.96, 0.98, 1.0).parallelStream().forEach(maceThreshold -> {
// 20 random repeats, seeded by the repeat index for reproducibility
// (an earlier comment said "ten"; the loop bound is 20)
for (int i = 0; i < 20; i++) {
Random random = new Random(i);
try {
// NOTE(review): if annotation or kappa computation throws, these temp
// files are never deleted — consider try/finally around the deletes.
File crowdExpert1 = File.createTempFile("crowd1", ".xml.gz");
File crowdExpert2 = File.createTempFile("crowd2", ".xml.gz");
// virtual crowd 1: first split of the 18 assignments
GoldEstimationResult goldEstimationResult1 = Step2bGoldReasonAnnotator
.annotateWithGoldLabels(csvFile, argumentsFile, crowdExpert1,
maceThreshold,
new WorkerAssignmentFilterRandomized(18, 1, crowdSize,
random),
null);
// virtual crowd 2: the complementary split, same seed
GoldEstimationResult goldEstimationResult2 = Step2bGoldReasonAnnotator
.annotateWithGoldLabels(csvFile, argumentsFile, crowdExpert2,
maceThreshold,
new WorkerAssignmentFilterRandomized(18, 2, crowdSize,
random),
null);
synchronized (tableAnnotatedArguments) {
if (!tableAnnotatedArguments.contains(crowdSize, maceThreshold)) {
tableAnnotatedArguments
.put(crowdSize, maceThreshold,
new DescriptiveStatistics());
}
tableAnnotatedArguments.get(crowdSize, maceThreshold)
.addValue(goldEstimationResult1.annotatedInstances);
tableAnnotatedArguments.get(crowdSize, maceThreshold)
.addValue(goldEstimationResult2.annotatedInstances);
}
double kappa = computeKappa(crowdExpert1, crowdExpert2);
synchronized (table) {
if (!table.contains(crowdSize, maceThreshold)) {
table.put(crowdSize, maceThreshold,
new DescriptiveStatistics());
}
table.get(crowdSize, maceThreshold).addValue(kappa);
}
FileUtils.forceDelete(crowdExpert1);
FileUtils.forceDelete(crowdExpert2);
// progress dump after every repeat (interleaved output is serialized per table)
synchronized (table) {
System.out.println("===================================");
printTable(table);
System.out.println("===================================");
}
synchronized (tableAnnotatedArguments) {
printTable(tableAnnotatedArguments);
System.out.println("===================================");
}
}
catch (Exception ex) {
throw new RuntimeException(ex);
}
}
// NOTE(review): this runs once per (crowdSize, threshold) task, not once at the
// end of the program — confirm whether a single final printout was intended.
printTable(table);
});
});
}
// Validation-task pilot study: splits the 18 worker assignments into two disjoint
// "virtual crowds", computes gold labels for each, and accumulates the agreement
// score between them per (crowd size, threshold) cell. Parameter ranges are
// narrowed to a single crowd size (7) and threshold (0.95).
public static void main(String[] args)
throws Exception
{
final File csvFile = new File(
"mturk/annotation-task/95-validation-task-pilot-task.output.csv");
final File argumentsFile = new File(
"mturk/annotation-task/data/92-original-warrant-batch-0001-5000-2447-good-reason-claim-pairs.xml.gz");
// row = crowd size, column = MACE threshold; cells collect score samples
TreeBasedTable<Integer, Double, DescriptiveStatistics> table = TreeBasedTable.create();
final int requiredAssignmentsSize = 18;
IntStream.range(7, 8).parallel().forEach(crowdSize -> {
Arrays.asList(0.95).parallelStream()
.forEach(maceThreshold -> {
// a single seeded repeat (loop bound is 1; an earlier comment
// claimed "ten random repeats")
for (int i = 0; i < 1; i++) {
Random random = new Random(i);
try {
// NOTE(review): temp files leak if annotation or scoring throws
// before the forceDelete calls below.
File crowdExpert1 = File.createTempFile("crowd1", ".xml.gz");
File crowdExpert2 = File.createTempFile("crowd2", ".xml.gz");
// virtual crowd 1: first split of the assignments
SortedMap<String, String> goldEstimationResult1 = Step8bTaskValidationGoldAnnotator
.annotateWithGoldLabels(Collections.singletonList(csvFile),
argumentsFile, crowdExpert1, maceThreshold,
new WorkerAssignmentFilterRandomized(
requiredAssignmentsSize, 1, crowdSize,
random));
// virtual crowd 2: the complementary split, same seed
SortedMap<String, String> goldEstimationResult2 = Step8bTaskValidationGoldAnnotator
.annotateWithGoldLabels(Collections.singletonList(csvFile),
argumentsFile, crowdExpert2, maceThreshold,
new WorkerAssignmentFilterRandomized(
requiredAssignmentsSize, 2, crowdSize,
random));
double score = computeScore(goldEstimationResult1,
goldEstimationResult2);
synchronized (table) {
if (!table.contains(crowdSize, maceThreshold)) {
table.put(crowdSize, maceThreshold,
new DescriptiveStatistics());
}
table.get(crowdSize, maceThreshold).addValue(score);
}
FileUtils.forceDelete(crowdExpert1);
FileUtils.forceDelete(crowdExpert2);
// progress dump after every repeat
synchronized (table) {
System.out.println("===================================");
printTable(table);
System.out.println("===================================");
}
}
catch (Exception ex) {
throw new RuntimeException(ex);
}
}
// NOTE(review): runs once per (crowdSize, threshold) task, not once at
// program end — confirm whether a single final printout was intended.
printTable(table);
});
});
}
// Alternative-warrant validation pilot: splits the 14 worker assignments into two
// disjoint "virtual crowds", computes gold labels for each, and accumulates the
// inter-crowd kappa per (crowd size, threshold) cell over seeded repeats.
public static void main(String[] args)
throws Exception
{
final File csvFile = new File(
"mturk/annotation-task/80-aw-validation-pilot-task.output.csv");
final File argumentsFile = new File(
"mturk/annotation-task/data/71-alternative-warrants-batch-0001-5000-001-600aw-batch-2390reason-claim-pairs-with-distracting-reasons.xml.gz");
// row = crowd size, column = MACE threshold; cells collect kappa samples
TreeBasedTable<Integer, Double, DescriptiveStatistics> table = TreeBasedTable.create();
final int requiredAssignmentsSize = 14;
IntStream.range(1, 8).parallel().forEach(crowdSize -> {
Arrays.asList(0.75, 0.80, 0.85, 0.9, 0.95, 1.0).parallelStream().forEach(maceThreshold -> {
// 20 random repeats, seeded by the repeat index for reproducibility
// (an earlier comment said "ten"; the loop bound is 20)
for (int i = 0; i < 20; i++) {
Random random = new Random(i);
try {
// NOTE(review): temp files leak if annotation or kappa computation
// throws before the forceDelete calls below.
File crowdExpert1 = File.createTempFile("crowd1", ".xml.gz");
File crowdExpert2 = File.createTempFile("crowd2", ".xml.gz");
// virtual crowd 1: first split of the assignments
SortedMap<String, String> goldEstimationResult1 = Step6bAlternativeWarrantValidationHITGoldAnnotator
.annotateWithGoldLabels(Collections.singletonList(csvFile),
Arrays.asList(argumentsFile), crowdExpert1, null, maceThreshold,
new WorkerAssignmentFilterRandomized(
requiredAssignmentsSize, 1, crowdSize, random));
// virtual crowd 2: the complementary split, same seed
SortedMap<String, String> goldEstimationResult2 = Step6bAlternativeWarrantValidationHITGoldAnnotator
.annotateWithGoldLabels(Collections.singletonList(csvFile),
Arrays.asList(argumentsFile), crowdExpert2, null, maceThreshold,
new WorkerAssignmentFilterRandomized(
requiredAssignmentsSize, 2, crowdSize, random));
double kappa = computeKappa(goldEstimationResult1, goldEstimationResult2);
synchronized (table) {
if (!table.contains(crowdSize, maceThreshold)) {
table.put(crowdSize, maceThreshold, new DescriptiveStatistics());
}
table.get(crowdSize, maceThreshold).addValue(kappa);
}
FileUtils.forceDelete(crowdExpert1);
FileUtils.forceDelete(crowdExpert2);
// progress dump after every repeat
synchronized (table) {
System.out.println("===================================");
printTable(table);
System.out.println("===================================");
}
}
catch (Exception ex) {
throw new RuntimeException(ex);
}
}
// dead code retained from an earlier per-crowd-size kappa printout:
// System.out.println("Kappas:");
// for (Map.Entry<Integer, Double> entry : kappas.entrySet()) {
// System.out.printf("%d\t%.2f%n", entry.getKey(), entry.getValue());
// }
// NOTE(review): runs once per (crowdSize, threshold) task, not once at
// program end — confirm whether a single final printout was intended.
printTable(table);
});
});
}
// Reason-disambiguation pilot: for a fixed crowd size (5), splits the 18 worker
// assignments into two disjoint "virtual crowds", computes gold labels for each,
// and accumulates the inter-crowd kappa per (crowd size, threshold) cell.
public static void main(String[] args)
throws Exception
{
final File csvFile = new File(
"mturk/annotation-task/60-pilot-reason-disambiguation-task.output.csv");
// row = crowd size, column = MACE threshold; cells collect kappa samples
TreeBasedTable<Integer, Double, DescriptiveStatistics> table = TreeBasedTable.create();
// degenerate range: only crowdSize == 5 is evaluated
for (int crowdSize = 5; crowdSize <= 5; crowdSize++) {
for (Double maceThreshold : Arrays.asList(0.85, 0.9, 0.95, 1.0)) {
// 20 random repeats, seeded by the repeat index for reproducibility
// (an earlier comment said "ten"; the loop bound is 20)
for (int i = 0; i < 20; i++) {
Random random = new Random(i);
// virtual crowd 1: first split of the 18 assignments
SortedMap<String, String> gold1 = Step4bReasonDisambiguationGoldAnnotator
.annotateWithGoldLabels(csvFile, null, null,
maceThreshold,
// new WorkerAssignmentsFilterSubsetByTime(0, crowdSize, true));
new WorkerAssignmentFilterRandomized(18, 1, crowdSize, random));
// virtual crowd 2: the complementary split, same seed
SortedMap<String, String> gold2 = Step4bReasonDisambiguationGoldAnnotator
.annotateWithGoldLabels(csvFile, null, null,
maceThreshold,
// new WorkerAssignmentsFilterSubsetByTime(crowdSize, crowdSize * 2,
// false));
new WorkerAssignmentFilterRandomized(18, 2, crowdSize, random));
// drop entries without an estimated label before computing agreement
gold1 = filterOutNullValueEntries(gold1);
gold2 = filterOutNullValueEntries(gold2);
double kappa = computeKappa(gold1, gold2);
if (!table.contains(crowdSize, maceThreshold)) {
table.put(crowdSize, maceThreshold, new DescriptiveStatistics());
}
table.get(crowdSize, maceThreshold).addValue(kappa);
}
}
}
printTable(table);
}
/** Builds a single-cell Guava table fixture for tests. */
private Table<String, String, String> getDummyGuavaTable() {
    final Table<String, String, String> dummyTable = TreeBasedTable.create();
    dummyTable.put("firstRow", "firstColumn", "baeldung");
    return dummyTable;
}