org.apache.hadoop.mapreduce.Counter#getName() Code Examples

The examples below show how org.apache.hadoop.mapreduce.Counter#getName() is used in several open-source projects; follow each project link to view the full source on GitHub.
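Before the project examples, here is a minimal sketch (not taken from any of the projects listed below) of the typical context in which Counter#getName() is called: iterating the counter groups of a completed Job and printing each counter's internal name and value. The class and method names PrintJobCounters and printCounters are illustrative only.

import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;

public class PrintJobCounters {
  public static void printCounters(Job job) throws Exception {
    Counters counters = job.getCounters();      // may be null if counters are unavailable
    if (counters == null) {
      return;
    }
    for (CounterGroup group : counters) {       // Counters is Iterable<CounterGroup>
      System.out.println("Group: " + group.getName()
          + " (" + group.getDisplayName() + ")");
      for (Counter counter : group) {           // CounterGroup is Iterable<Counter>
        // getName() returns the internal counter name; getDisplayName() the human-readable one
        System.out.println("  " + counter.getName() + " = " + counter.getValue());
      }
    }
  }
}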

Example 1  Project: hadoop  File: EventWriter.java
static JhCounters toAvro(Counters counters, String name) {
  JhCounters result = new JhCounters();
  result.name = new Utf8(name);
  result.groups = new ArrayList<JhCounterGroup>(0);
  if (counters == null) return result;
  for (CounterGroup group : counters) {
    JhCounterGroup g = new JhCounterGroup();
    g.name = new Utf8(group.getName());
    g.displayName = new Utf8(group.getDisplayName());
    g.counts = new ArrayList<JhCounter>(group.size());
    for (Counter counter : group) {
      JhCounter c = new JhCounter();
      c.name = new Utf8(counter.getName());
      c.displayName = new Utf8(counter.getDisplayName());
      c.value = counter.getValue();
      g.counts.add(c);
    }
    result.groups.add(g);
  }
  return result;
}
 
Example 2  Project: big-c  File: EventWriter.java
static JhCounters toAvro(Counters counters, String name) {
  JhCounters result = new JhCounters();
  result.name = new Utf8(name);
  result.groups = new ArrayList<JhCounterGroup>(0);
  if (counters == null) return result;
  for (CounterGroup group : counters) {
    JhCounterGroup g = new JhCounterGroup();
    g.name = new Utf8(group.getName());
    g.displayName = new Utf8(group.getDisplayName());
    g.counts = new ArrayList<JhCounter>(group.size());
    for (Counter counter : group) {
      JhCounter c = new JhCounter();
      c.name = new Utf8(counter.getName());
      c.displayName = new Utf8(counter.getDisplayName());
      c.value = counter.getValue();
      g.counts.add(c);
    }
    result.groups.add(g);
  }
  return result;
}
 
Example 3  Project: RDFS  File: ProxyJobTracker.java
private static void incrementMetricsAndReset(
  MetricsRecord record, Counters counters) {
  // Now update metrics with the counters and reset the aggregate.
  for (Counters.Group group : counters) {
    String groupName = group.getName();
    for (Counter counter : group) {
      String name = groupName + "_" + counter.getName();
      name = name.replaceAll("[^a-zA-Z_]", "_").toLowerCase();
      record.incrMetric(name, counter.getValue());
    }
  }
  // Reset the aggregate counters.
  for (Counters.Group g : counters) {
    for (Counter c : g) {
      c.setValue(0);
    }
  }
  record.update();
}
 
Example 4  Project: datawave  File: IngestMetricsSummaryLoader.java
private long writeCounterGroup(Context context, CounterGroup group, String row, String cf) throws IOException, InterruptedException {
    long counterTotal = 0;
    for (Counter c : group) {
        String counterName = c.getName();
        String count = Long.toString(c.getValue());
        context.write(makeKey(row, cf, counterName), makeValue(count));
        counterTotal += c.getValue();
    }
    return counterTotal;
}
 
Example 5  Project: datawave  File: IngestMetricsSummaryLoader.java
private static String checkForIngestLabelOverride(Counters ingestJobCounters) {
    CounterGroup jobQueueName = ingestJobCounters.getGroup(IngestProcess.METRICS_LABEL_OVERRIDE.name());
    if (jobQueueName.size() > 0) {
        Counter myCounter = jobQueueName.iterator().next();
        return myCounter.getName();
    }
    return null;
}
 
Example 6  Project: datawave  File: FlagMakerMetricsMapper.java
@Override
public void map(Text flagFile, Counters counters, Context context) throws IOException, InterruptedException {
    System.out.println("Received counters for job " + flagFile);
    
    log.info(counters);
    
    long endTime = counters.findCounter(InputFile.FLAGMAKER_END_TIME).getValue();
    long startTime = counters.findCounter(InputFile.FLAGMAKER_START_TIME).getValue();
    
    Mutation statsPersist = new Mutation("flagFile\u0000" + flagFile);
    statsPersist.put("", "", new Value(serializeCounters(counters)));
    context.write(null, statsPersist);
    
    // Break the counters down individually; per-file stats are not available when the FlagMaker batch-processes input.
    for (Counter c : counters.getGroup(InputFile.class.getSimpleName())) {
        Text outFile = new Text(c.getName());
        Mutation m = new Mutation(outFile);
        long fileTime = c.getValue();
        try {
            Counters cs = new Counters();
            cs.findCounter(InputFile.class.getSimpleName(), outFile.toString()).setValue(c.getValue());
            cs.findCounter(FlagFile.class.getSimpleName(), flagFile.toString()).increment(1);
            cs.findCounter(InputFile.FLAGMAKER_END_TIME).setValue(endTime);
            cs.findCounter(InputFile.FLAGMAKER_START_TIME).setValue(startTime);
            m.put(WritableUtil.getLong(endTime - fileTime), WritableUtil.getLong(endTime), new Value(serializeCounters(cs)));
            context.write(null, m);
        } catch (IOException e) {
            log.error("Could not add counters to mutation!!!", e);
        }
    }
}
 
Example 7  Project: hadoop  File: TaskCounterGroupInfo.java
public TaskCounterGroupInfo(String name, CounterGroup group) {
  this.counterGroupName = name;
  this.counter = new ArrayList<TaskCounterInfo>();

  for (Counter c : group) {
    TaskCounterInfo cinfo = new TaskCounterInfo(c.getName(), c.getValue());
    this.counter.add(cinfo);
  }
}
 
Example 8  Project: big-c  File: TaskCounterGroupInfo.java
public TaskCounterGroupInfo(String name, CounterGroup group) {
  this.counterGroupName = name;
  this.counter = new ArrayList<TaskCounterInfo>();

  for (Counter c : group) {
    TaskCounterInfo cinfo = new TaskCounterInfo(c.getName(), c.getValue());
    this.counter.add(cinfo);
  }
}
 
Example 9  Project: hadoop  File: CounterInfo.java
public CounterInfo(Counter c, Counter mc, Counter rc) {
  this.name = c.getName();
  this.totalCounterValue = c.getValue();
  this.mapCounterValue = mc == null ? 0 : mc.getValue();
  this.reduceCounterValue = rc == null ? 0 : rc.getValue();
}
 
Example 10  Project: big-c  File: CounterInfo.java
public CounterInfo(Counter c, Counter mc, Counter rc) {
  this.name = c.getName();
  this.totalCounterValue = c.getValue();
  this.mapCounterValue = mc == null ? 0 : mc.getValue();
  this.reduceCounterValue = rc == null ? 0 : rc.getValue();
}