The following examples show how org.apache.hadoop.io.DoubleWritable#get() is used in practice. Each snippet is taken from an open-source project; follow the linked GitHub sources for full context.
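Before the project examples, here is a minimal self-contained sketch of the pattern they all rely on: write() serializes the double, readFields() deserializes it, and get() returns the primitive. Class and variable names here are illustrative, not from any of the projects below.

import java.io.*;
import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableRoundTrip {
    public static void main(String[] args) throws IOException {
        // Serialize a double through the Writable interface.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        new DoubleWritable(3.14).write(new DataOutputStream(bytes));

        // Deserialize into a fresh instance and read the primitive back with get().
        DoubleWritable copy = new DoubleWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy.get()); // 3.14
    }
}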
public HashMap<String,Double> scanResults(Iterable<Entry<Key,Value>> data) {
HashMap<String,Double> fieldWeights = new HashMap<>();
final DoubleWritable vWeight = new DoubleWritable();
for (Entry<Key,Value> kv : data) {
if (log.isDebugEnabled()) {
log.debug("Received key " + kv.getKey().toStringNoTime());
}
Text field = kv.getKey().getRow();
Double weight = null;
try {
vWeight.readFields(new DataInputStream(new ByteArrayInputStream(kv.getValue().get())));
weight = vWeight.get();
} catch (IOException e) {
log.error("Could not parse value for " + field, e);
continue;
}
fieldWeights.put(field.toString(), weight);
}
return fieldWeights;
}
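The snippet above assumes each Accumulo Value holds the serialized bytes of a DoubleWritable. A sketch of the corresponding write side, under that assumption (the empty column family/qualifier and the writer variable are hypothetical):

// Hypothetical writer: serializes a weight into an Accumulo Mutation whose
// row is the field name and whose Value is the DoubleWritable's bytes.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
new DoubleWritable(weight).write(new DataOutputStream(baos));
Mutation m = new Mutation(new Text(field));
m.put(new Text(""), new Text(""), new Value(baos.toByteArray()));
writer.addMutation(m); // writer is a previously created BatchWriter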
@Override
public void compute(Vertex<Text, DoubleWritable, Text> vertex, Iterable<DoubleWritable> messages) throws IOException {
float dampingFactor = this.getConf().getFloat(DAMPING_FACTOR, DAMPING_FACTOR_DEFAULT_VALUE);
long step = getSuperstep();
if (step == 0) {
//set initial value
logger.debug("Superstep is 0: Setting the default value.");
vertex.setValue(new DoubleWritable(1.0 / getTotalNumVertices()));
} else { // go until no one votes to continue
double rank = 0;
for (DoubleWritable partial : messages) {
rank += partial.get();
}
rank = ((1 - dampingFactor) / getTotalNumVertices()) + (dampingFactor * rank);
double vertexValue = vertex.getValue().get();
double delta = Math.abs(rank - vertexValue) / vertexValue;
aggregate(MAX_EPSILON, new DoubleWritable(delta));
vertex.setValue(new DoubleWritable(rank));
logger.debug("{} is calculated {} for a PageRank.", vertex.getId(), rank);
}
distributeRank(vertex);
}
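distributeRank is not shown in this snippet. A plausible implementation, assuming it forwards the vertex's rank split evenly across its out-edges (the standard PageRank message step in Giraph):

// Hypothetical helper, matching the usual Giraph PageRank message step:
// each neighbor receives rank / outDegree.
private void distributeRank(Vertex<Text, DoubleWritable, Text> vertex) {
    long edges = vertex.getNumEdges();
    if (edges > 0) {
        sendMessageToAllEdges(vertex,
            new DoubleWritable(vertex.getValue().get() / edges));
    }
}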
@Override
public void reduce(
final LongWritable key,
final Iterable<DoubleWritable> values,
final Context context) throws IOException, InterruptedException {
double summer = 0;
double winter = 0;
for (final DoubleWritable v : values) {
// Sign-encoded input: negative doubles appear to carry the winter-season
// weight, positive doubles the summer-season weight, for the same cell key.
if (v.get() < 0) {
winter = -v.get();
} else {
summer = v.get();
}
}
context.write(new ComparisonCellData(summer, winter), key);
collectStats(key.get(), context);
}
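A mapper feeding this reducer would have to apply that sign convention. A minimal sketch under that assumption (the class layout and the isWinter flag are hypothetical):

// Hypothetical map side of the sign convention: negate winter weights so a
// single reducer stream can carry both seasons for the same cell key.
protected void map(LongWritable cellId, DoubleWritable weight, Context context)
        throws IOException, InterruptedException {
    double v = isWinter ? -weight.get() : weight.get(); // isWinter: hypothetical flag
    context.write(cellId, new DoubleWritable(v));
}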
public void readFields(DataInput dataInput) throws IOException {
// Fields must be read back in exactly the order write() emitted them.
Text text = new Text();
text.readFields(dataInput);
wifiProb = text.toString();
IntWritable intReader = new IntWritable();
intReader.readFields(dataInput);
inNoOutWifi = intReader.get();
intReader.readFields(dataInput);
inNoOutStore = intReader.get();
intReader.readFields(dataInput);
outNoInWifi = intReader.get();
intReader.readFields(dataInput);
outNoInStore = intReader.get();
intReader.readFields(dataInput);
inAndOutWifi = intReader.get();
intReader.readFields(dataInput);
inAndOutStore = intReader.get();
intReader.readFields(dataInput);
stayInWifi = intReader.get();
intReader.readFields(dataInput);
stayInStore = intReader.get();
DoubleWritable doubleWritable = new DoubleWritable();
doubleWritable.readFields(dataInput);
jumpRate = doubleWritable.get();
doubleWritable.readFields(dataInput);
deepVisit = doubleWritable.get();
doubleWritable.readFields(dataInput);
inStoreRate = doubleWritable.get();
}
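The matching write() must emit the same fields in the same order. A condensed sketch, assuming the field names used above:

public void write(DataOutput dataOutput) throws IOException {
    new Text(wifiProb).write(dataOutput);
    // Same order as readFields: the eight counters, then the three rates.
    IntWritable intWriter = new IntWritable();
    for (int v : new int[] {inNoOutWifi, inNoOutStore, outNoInWifi, outNoInStore,
            inAndOutWifi, inAndOutStore, stayInWifi, stayInStore}) {
        intWriter.set(v);
        intWriter.write(dataOutput);
    }
    DoubleWritable doubleWriter = new DoubleWritable();
    for (double v : new double[] {jumpRate, deepVisit, inStoreRate}) {
        doubleWriter.set(v);
        doubleWriter.write(dataOutput);
    }
}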
@Override
public void compute(
Vertex<LongWritable, DoubleWritable, FloatWritable> vertex,
Iterable<DoubleWritable> messages) throws IOException {
if (maxSupersteps == -1) {
maxSupersteps = PageRankParameters.hasElement(PageRankParameters.PageRankEnum.ITERATION)
? PageRankParameters.getParameter(PageRankParameters.PageRankEnum.ITERATION)
: MAX_SUPERSTEPS;
}
if (getSuperstep() >= 1) {
double sum = 0;
for (DoubleWritable message : messages) {
sum += message.get();
}
DoubleWritable vertexValue =
new DoubleWritable((0.15f / getTotalNumVertices()) + 0.85f * sum);
vertex.setValue(vertexValue);
aggregate(MAX_AGG, vertexValue);
aggregate(MIN_AGG, vertexValue);
aggregate(SUM_AGG, new LongWritable(1));
LOG.info(vertex.getId() + ": PageRank=" + vertexValue +
" max=" + getAggregatedValue(MAX_AGG) +
" min=" + getAggregatedValue(MIN_AGG));
}
if (getSuperstep() < maxSupersteps) {
long edges = vertex.getNumEdges();
sendMessageToAllEdges(vertex,
new DoubleWritable(vertex.getValue().get() / edges));
} else {
vertex.voteToHalt();
}
}
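MAX_AGG, MIN_AGG, and SUM_AGG must be registered before aggregate() can be called against them. In Giraph this is typically done in a MasterCompute; a sketch along the lines of the stock SimplePageRankComputation example:

// Typical Giraph master registration for the three aggregators used above.
public static class PageRankMasterCompute extends DefaultMasterCompute {
    @Override
    public void initialize() throws InstantiationException, IllegalAccessException {
        registerAggregator(SUM_AGG, LongSumAggregator.class);
        registerPersistentAggregator(MIN_AGG, DoubleMinAggregator.class);
        registerPersistentAggregator(MAX_AGG, DoubleMaxAggregator.class);
    }
}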
@Override
protected void reduce(LongWritable key, Iterable<DoubleWritable> values, Context context)
throws IOException, InterruptedException {
double totalRate = 0;
int activeMappers = 0;
for (DoubleWritable value : values) {
totalRate += value.get();
activeMappers++;
}
context.write(key, new Text(String.format("%f\t%d", totalRate, activeMappers)));
}
public void reduce(Text key, Iterable<DoubleWritable> values,
Context context)
throws IOException, InterruptedException {
double total = 0;
int instances = 0;
for (DoubleWritable stockPrice : values) {
total += stockPrice.get();
instances++;
}
// outValue is assumed to be a reusable DoubleWritable field on the reducer.
outValue.set(total / (double) instances);
context.write(key, outValue);
}
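Note that this averaging reducer cannot be reused as a combiner: an average of partial averages is not the overall average. The usual workaround is to carry (sum, count) pairs and divide only at the end; a sketch using a hypothetical SumCountWritable:

// Combiner-safe averaging: combine partial (sum, count) pairs, divide last.
// SumCountWritable is a hypothetical Writable holding a double sum and a long count.
public void reduce(Text key, Iterable<SumCountWritable> values, Context context)
        throws IOException, InterruptedException {
    double sum = 0;
    long count = 0;
    for (SumCountWritable v : values) {
        sum += v.getSum();
        count += v.getCount();
    }
    context.write(key, new DoubleWritable(sum / count));
}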
@Override
protected void reduce(Text key, Iterable<DoubleWritable> recordings, Context context)
throws IOException, InterruptedException {
// Double.NEGATIVE_INFINITY, not Double.MIN_VALUE: MIN_VALUE is the smallest
// positive double, so it would silently break on all-negative inputs.
double maxPrice = Double.NEGATIVE_INFINITY;
for (DoubleWritable recording : recordings) {
if (maxPrice < recording.get()) {
maxPrice = recording.get();
}
}
final StockWritable stock = new StockWritable();
stock.setStockName(key.toString());
stock.setMaxPrice(maxPrice);
context.write(NullWritable.get(), stock);
}
@Override
public void reduce(
final LongWritable key,
final Iterable<DoubleWritable> values,
final Context context) throws IOException, InterruptedException {
double sum = 0.0;
for (final DoubleWritable value : values) {
sum += value.get();
}
context.write(new DoubleWritable(sum), key);
collectStats(key, sum, context);
}
@Override
public void reduce(
final LongWritable key,
final Iterable<DoubleWritable> values,
final Context context) throws IOException, InterruptedException {
double s = 0.0;
for (final DoubleWritable value : values) {
s += value.get();
}
context.write(key, new DoubleWritable(s));
}
@Override
protected void map(
final LongWritable key,
final DoubleWritable value,
final org.apache.hadoop.mapreduce.Mapper.Context context)
throws IOException, InterruptedException {
long positiveKey = key.get();
double adjustedValue = value.get();
if (positiveKey < 0) {
positiveKey = -positiveKey - 1;
adjustedValue *= -1;
}
super.map(new LongWritable(positiveKey), new DoubleWritable(adjustedValue), context);
}
public final void reduce(final Text key, final Iterable<DoubleWritable> values, final Context context)
throws IOException, InterruptedException {
double sum = 0.0;
for (final DoubleWritable val : values) {
sum += val.get();
}
context.write(key, new DoubleWritable(sum));
}
public final void reduce(final Text key, final Iterable<DoubleWritable> values, final Context context)
throws IOException, InterruptedException {
double sum = 0.0;
int i = 0;
for (final DoubleWritable val : values) {
i++;
sum += val.get();
}
context.write(new IntWritable(i), new DoubleWritable(sum));
}
public final void reduce(final Text key, final Iterable<DoubleWritable> values, final Context context)
throws IOException, InterruptedException {
double sum = 0.0;
for (final DoubleWritable val : values) {
sum += val.get();
}
context.write(key, new DoubleWritable(sum));
}
public final void reduce(final Text key, final Iterable<DoubleWritable> values, final Context context)
throws IOException, InterruptedException {
double highestSale = 0.0;
double currentSale;
for (DoubleWritable val : values) {
currentSale = val.get();
if (highestSale < currentSale) {
highestSale = currentSale;
}
}
context.write(key, new DoubleWritable(highestSale));
}
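Because summation is associative and commutative, a sum reducer like the ones above can also serve as a combiner. A sketch of the job wiring (SumReducer is a hypothetical name for such a class):

// Sum is associative and commutative, so the same class can combine map-side.
Job job = Job.getInstance(conf, "sum");
job.setReducerClass(SumReducer.class);   // hypothetical reducer class
job.setCombinerClass(SumReducer.class);  // safe only because partial sums compose
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(DoubleWritable.class);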
@Override
public void readFields(DataInput in) throws IOException {
// 0. Timestamp (read into a fresh Text; readFields overwrites any prior value)
Text tms_text = new Text();
tms_text.readFields(in);
tms = tms_text.toString();
// 1. integers
IntWritable intw = new IntWritable();
for (int i = 0; i < num.length; i++) {
intw.readFields(in);
num[i] = intw.get();
}
intw.readFields(in);
int1 = intw.get();
intw.readFields(in);
int2 = intw.get();
// 2. strings
Text txt = new Text();
for (int i = 0; i < strings.length; i++) {
txt.readFields(in);
strings[i] = txt.toString();
}
txt.readFields(in);
st1 = txt.toString();
// 3. doubles
DoubleWritable dw = new DoubleWritable();
for (int i = 0; i < dubs.length; i++) {
dw.readFields(in);
dubs[i] = dw.get();
}
dw.readFields(in);
db = dw.get();
// 4. floats
FloatWritable fw = new FloatWritable();
for (int i = 0; i < fts.length; i++) {
fw.readFields(in);
fts[i] = fw.get();
}
fw.readFields(in);
ft = fw.get();
// 5. longs
LongWritable lw = new LongWritable();
for (int i = 0; i < lngs.length; i++) {
lw.readFields(in);
lngs[i] = lw.get();
}
lw.readFields(in);
lng = lw.get();
// 6. booleans
BooleanWritable bw = new BooleanWritable();
for (int i = 0; i < bools.length; ++i) {
bw.readFields(in);
bools[i] = bw.get();
}
bw.readFields(in);
bool = bw.get();
// 7. shorts
ShortWritable sw = new ShortWritable();
for (int i = 0; i < shrts.length; ++i) {
sw.readFields(in);
shrts[i] = sw.get();
}
sw.readFields(in);
shrt = sw.get();
// 8. bytes
BytesWritable btsw = new BytesWritable();
btsw.readFields(in);
// getBytes() returns the padded backing array, so copy only getLength() bytes.
bts = Arrays.copyOf(btsw.getBytes(), btsw.getLength());
}
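As with every Writable, the corresponding write() must mirror this field order exactly. A condensed sketch of its first two groups, using the field names from the method above:

public void write(DataOutput out) throws IOException {
    new Text(tms).write(out);                 // 0. Timestamp
    IntWritable intw = new IntWritable();     // 1. integers, same order as readFields
    for (int n : num) {
        intw.set(n);
        intw.write(out);
    }
    intw.set(int1);
    intw.write(out);
    intw.set(int2);
    intw.write(out);
    // ... groups 2-8 follow in the same order as readFields.
}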
@Override
protected void reduce(
final DoubleWritable key,
final Iterable<LongWritable> values,
final Context context) throws IOException, InterruptedException {
if (key.get() < 0) {
final double prevMax = -key.get();
if (prevMax > max) {
max = prevMax;
}
} else {
final double value = key.get();
final double normalizedValue = value / max;
// For consistency, give all cells with matching weight the same percentile.
// Because a DoubleWritable is used as the key, the ordering isn't always
// completely reproducible (Double.equals takes no epsilon into account), but
// comparing against the previous value with an appropriate epsilon makes it
// reproducible.
final double percentile;
if (FloatCompareUtils.checkDoublesEqual(prevValue, value, WEIGHT_EPSILON)) {
percentile = prevPct;
} else {
percentile = (currentKey + 1.0) / totalKeys;
prevPct = percentile;
prevValue = value;
}
// calculate weights for this key
for (final LongWritable v : values) {
final long cellIndex = v.get() / numLevels;
final TileInfo tileInfo = fromCellIndexToTileInfo(cellIndex);
final WritableRaster raster =
RasterUtils.createRasterTypeDouble(NUM_BANDS, KDEJobRunner.TILE_SIZE);
raster.setSample(tileInfo.x, tileInfo.y, 0, key.get());
raster.setSample(tileInfo.x, tileInfo.y, 1, normalizedValue);
raster.setSample(tileInfo.x, tileInfo.y, 2, percentile);
context.write(
new GeoWaveOutputKey(coverageName, indexList.toArray(new String[0])),
RasterUtils.createCoverageTypeDouble(
coverageName,
tileInfo.tileWestLon,
tileInfo.tileEastLon,
tileInfo.tileSouthLat,
tileInfo.tileNorthLat,
MINS_PER_BAND,
MAXES_PER_BAND,
NAME_PER_BAND,
raster,
crsCode));
currentKey++;
}
}
}
@Override
public Double convert( ValueMetaInterface meta, DoubleWritable obj ) throws TypeConversionException {
return Double.valueOf( obj.get() );
}