Class org.apache.hadoop.io.DoubleWritable: source code examples

The examples below show how the org.apache.hadoop.io.DoubleWritable API is used in real projects; follow the links to GitHub to view the full source.
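
Before the project listings, here is a minimal self-contained sketch (ours, not taken from any project below) of the contract every example relies on: DoubleWritable is a mutable, serializable box around a Java double with get/set, write/readFields, and natural ordering.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableDemo {
    public static void main(String[] args) throws IOException {
        DoubleWritable original = new DoubleWritable(3.14);

        // Serialize the value the same way Hadoop does when shuffling map output.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        original.write(new DataOutputStream(buffer));

        // Deserialize into a fresh, reusable instance.
        DoubleWritable restored = new DoubleWritable();
        restored.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));

        System.out.println(restored.get());               // 3.14
        System.out.println(original.compareTo(restored)); // 0: same value, natural double ordering
    }
}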

Example 1

/**
 * Determine the Hadoop Writable type to use when sending a Kettle value back to Hadoop.
 *
 * @param kettleType the Kettle value metadata to map; may be {@code null}
 * @return the {@link Writable} class to convert {@code kettleType} to when sending data back to Hadoop
 */
public static Class<? extends Writable> getWritableForKettleType( ValueMetaInterface kettleType ) {
  if ( kettleType == null ) {
    return NullWritable.class;
  }
  switch ( kettleType.getType() ) {
    case ValueMetaInterface.TYPE_STRING:
    case ValueMetaInterface.TYPE_BIGNUMBER:
    case ValueMetaInterface.TYPE_DATE:
      return Text.class;
    case ValueMetaInterface.TYPE_INTEGER:
      return LongWritable.class;
    case ValueMetaInterface.TYPE_NUMBER:
      return DoubleWritable.class;
    case ValueMetaInterface.TYPE_BOOLEAN:
      return BooleanWritable.class;
    case ValueMetaInterface.TYPE_BINARY:
      return BytesWritable.class;
    default:
      return Text.class;
  }
}
 
Example 2, project: CloverETL-Engine, file: HadoopCloverConvert.java
@SuppressWarnings("rawtypes")
public static Class cloverType2Hadoop(DataFieldMetadata field) throws IOException{
	switch (field.getDataType()){
	case BOOLEAN:
		return BooleanWritable.class;
	case BYTE:
	case CBYTE:
		return BytesWritable.class;
	case DATE:
		return LongWritable.class;
	case INTEGER:
		return IntWritable.class;
	case LONG:
		return LongWritable.class;
	case NUMBER:
		return DoubleWritable.class;
	case STRING:
		return Text.class;
	default:
		throw new IOException(String.format("Unsupported CloverDX data type \"%s\" of field \"%s\" in conversion to Hadoop.",field.getDataType().getName(),field.getName()));
		
	}
}
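
Both mappings above answer the question "which Writable class represents this field type?". Wrapping an actual runtime value is the complementary step; the helper below is our own illustrative sketch (class and method names are assumptions), mirroring the type choices of the two mappings:

import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public final class WritableWrapper {
    // Wrap a plain Java value in the Writable the mappings above would choose.
    public static Writable wrap(Object value) {
        if (value == null)            return NullWritable.get();
        if (value instanceof Boolean) return new BooleanWritable((Boolean) value);
        if (value instanceof Integer) return new IntWritable((Integer) value);
        if (value instanceof Long)    return new LongWritable((Long) value);
        if (value instanceof Double)  return new DoubleWritable((Double) value);
        if (value instanceof byte[])  return new BytesWritable((byte[]) value);
        return new Text(value.toString()); // both mappings fall back to Text
    }
}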
 
Example 3, project: spork, file: L12.java
public void map(
        LongWritable k,
        Text val,
        OutputCollector<Text, DoubleWritable> oc,
        Reporter reporter) throws IOException {

    List<Text> fields = Library.splitLine(val, '\u0001'); // PigMix records use Ctrl-A as the field delimiter

    // Filter out null users or query terms.
    if (fields.get(0).getLength() == 0 ||
            fields.get(3).getLength() == 0) return;
    try {
        oc.collect(fields.get(0),
            new DoubleWritable(Double.valueOf(fields.get(6).toString())));
    } catch (NumberFormatException nfe) {
        oc.collect(fields.get(0), new DoubleWritable(0));
    }
}
 
Example 4, project: incubator-gobblin, file: OrcTestTools.java
/**
 * All Writable objects passed in here are guaranteed to be primitive writable objects.
 */
private boolean objCastHelper(Object javaObj, Writable obj) {
  if (obj instanceof IntWritable) {
    return ((IntWritable) obj).get() == (Integer) javaObj;
  } else if (obj instanceof Text) {
    return (obj).toString().equals(javaObj);
  } else if (obj instanceof LongWritable) {
    return ((LongWritable) obj).get() == (Long) javaObj;
  } else if (obj instanceof ShortWritable) {
    return ((ShortWritable) obj).get() == (Short) javaObj;
  } else if (obj instanceof DoubleWritable) {
    return ((DoubleWritable) obj).get() == (Double) javaObj;
  } else {
    throw new RuntimeException("Cannot recognize the writable type, please enrich the castHelper function");
  }
}
 
Example 5, project: rheem, file: PageRankAlgorithm.java
@Override
public void preSuperstep() {
    if (getSuperstep() >= 3) {
        LOG.info("aggregatedNumVertices=" +
                getAggregatedValue(SUM_AGG) +
                " NumVertices=" + getTotalNumVertices());
        if (this.<LongWritable>getAggregatedValue(SUM_AGG).get() !=
                getTotalNumVertices()) {
            throw new RuntimeException("wrong value of SumAggreg: " +
                    getAggregatedValue(SUM_AGG) + ", should be: " +
                    getTotalNumVertices());
        }
        DoubleWritable maxPagerank = getAggregatedValue(MAX_AGG);
        LOG.info("aggregatedMaxPageRank=" + maxPagerank.get());
        DoubleWritable minPagerank = getAggregatedValue(MIN_AGG);
        LOG.info("aggregatedMinPageRank=" + minPagerank.get());
    }
}
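
getAggregatedValue only returns meaningful values if the aggregators were registered up front. A sketch of the registration this code presumably depends on (class name is assumed; SUM_AGG, MIN_AGG and MAX_AGG are the same string constants used above):

import org.apache.giraph.aggregators.DoubleMaxAggregator;
import org.apache.giraph.aggregators.DoubleMinAggregator;
import org.apache.giraph.aggregators.LongSumAggregator;
import org.apache.giraph.master.DefaultMasterCompute;

public class PageRankMasterCompute extends DefaultMasterCompute {
    @Override
    public void initialize() throws InstantiationException, IllegalAccessException {
        // SUM_AGG holds a LongWritable vertex-count check; MIN_AGG/MAX_AGG hold DoubleWritable rank bounds.
        registerAggregator(SUM_AGG, LongSumAggregator.class);
        registerPersistentAggregator(MIN_AGG, DoubleMinAggregator.class);
        registerPersistentAggregator(MAX_AGG, DoubleMaxAggregator.class);
    }
}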
 
Example 6, project: Eagle, file: GenericAggregateQuery.java
protected Map<List<String>, List<Double>> keyValuesToMap(List<GroupbyKeyValue> entities) throws Exception {
	Map<List<String>, List<Double>> aggResultMap = new HashMap<List<String>, List<Double>>();
	try {
		for(GroupbyKeyValue keyValue:entities){
			List<String> key = new ArrayList<String>();
			for(BytesWritable bw:keyValue.getKey().getValue()){
				key.add(new String(bw.copyBytes(), QueryConstants.CHARSET));
			}
			List<Double> value = new ArrayList<Double>();
			for(DoubleWritable wa:keyValue.getValue().getValue()){
				value.add(wa.get());
			}
			aggResultMap.put(key, value);
		}
	} catch (UnsupportedEncodingException e) {
		LOG.error(QueryConstants.CHARSET +" not support: "+e.getMessage(),e);
	}
	return aggResultMap;
}
 
Example 7, project: Kylin, file: RowValueDecoder.java
private void convertToJavaObjects(Object[] mapredObjs, Object[] results) {
    for (int i = 0; i < mapredObjs.length; i++) {
        Object o = mapredObjs[i];

        if (o instanceof LongWritable)
            o = ((LongWritable) o).get();
        else if (o instanceof IntWritable)
            o = ((IntWritable) o).get();
        else if (o instanceof DoubleWritable)
            o = ((DoubleWritable) o).get();
        else if (o instanceof FloatWritable)
            o = ((FloatWritable) o).get();

        results[i] = o;
    }
}
 
Example 8, project: IntroToHadoopAndMR__Udacity_Course, file: P1.java
public final static void main(final String[] args) throws Exception {
	final Configuration conf = new Configuration();

	final Job job = new Job(conf, "P1");
	job.setJarByClass(P1.class);

	job.setOutputKeyClass(Text.class);
	job.setOutputValueClass(DoubleWritable.class);

	job.setMapperClass(P1Map.class);
	job.setCombinerClass(P1Reduce.class);
	job.setReducerClass(P1Reduce.class);

	job.setInputFormatClass(TextInputFormat.class);
	job.setOutputFormatClass(TextOutputFormat.class);

	FileInputFormat.addInputPath(job, new Path(args[0]));
	FileOutputFormat.setOutputPath(job, new Path(args[1]));

	job.waitForCompletion(true);
}
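
P1Map and P1Reduce themselves are not part of this listing. Because P1Reduce is used as both combiner and reducer, its operation must be associative with matching input and output types; a plausible sketch (ours, not the course's actual code) is a per-key sum:

import java.io.IOException;

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class P1Reduce extends Reducer<Text, DoubleWritable, Text, DoubleWritable> {
    @Override
    protected void reduce(final Text key, final Iterable<DoubleWritable> values,
            final Context context) throws IOException, InterruptedException {
        double sum = 0;
        for (final DoubleWritable value : values) {
            sum += value.get(); // unbox, accumulate, re-box once per key
        }
        context.write(key, new DoubleWritable(sum));
    }
}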
 
Example 9

@Override
public void compute(Vertex<Text, DoubleWritable, Text> vertex, Iterable<DoubleWritable> messages) throws IOException {

    float dampingFactor = this.getConf().getFloat(DAMPING_FACTOR, DAMPING_FACTOR_DEFAULT_VALUE);

    long step = getSuperstep();

    if (step == 0) {
        //set initial value
        logger.debug("Superstep is 0: Setting the default value.");
        vertex.setValue(new DoubleWritable(1.0 / getTotalNumVertices()));
    } else { // go until no one votes to continue

        double rank = 0;
        for (DoubleWritable partial : messages) {
            rank += partial.get();
        }
        rank = ((1 - dampingFactor) / getTotalNumVertices()) + (dampingFactor * rank);
        double vertexValue = vertex.getValue().get();
        double delta = Math.abs(rank - vertexValue) / vertexValue;
        aggregate(MAX_EPSILON, new DoubleWritable(delta));
        vertex.setValue(new DoubleWritable(rank));
        logger.debug("{} is calculated {} for a PageRank.", vertex.getId(), rank);
    }
    distributeRank(vertex);
}
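
distributeRank is called but not shown. In Giraph-style PageRank it conventionally gives each neighbor an equal share of the current rank; a sketch under that assumption:

private void distributeRank(Vertex<Text, DoubleWritable, Text> vertex) {
    // Each neighbor receives an equal share of this vertex's current rank.
    DoubleWritable share = new DoubleWritable(vertex.getValue().get() / vertex.getNumEdges());
    sendMessageToAllEdges(vertex, share);
}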
 
Example 10, project: hiped2, file: AvroMixedMapReduce.java
public void reduce(Text key,
                   Iterator<DoubleWritable> values,
                   OutputCollector<AvroWrapper<StockAvg>,
                       NullWritable> output,
                   Reporter reporter) throws IOException {

  Mean mean = new Mean();
  while (values.hasNext()) {
    mean.increment(values.next().get());
  }
  StockAvg avg = new StockAvg();
  avg.setSymbol(key.toString());
  avg.setAvg(mean.getResult());
  output.collect(new AvroWrapper<StockAvg>(avg),
      NullWritable.get());
}
 
Example 11, project: eagle, file: GenericCoprocessorAggregateQuery.java
protected Map<List<String>, List<Double>> keyValuesToMap(List<GroupbyKeyValue> entities) throws Exception {
    Map<List<String>, List<Double>> aggResultMap = new HashMap<List<String>, List<Double>>();
    try {
        for (GroupbyKeyValue keyValue : entities) {
            List<String> key = new ArrayList<String>();
            for (BytesWritable bw : keyValue.getKey().getValue()) {
                key.add(new String(bw.copyBytes(), QueryConstants.CHARSET));
            }
            List<Double> value = new ArrayList<Double>();
            for (DoubleWritable wa : keyValue.getValue().getValue()) {
                value.add(wa.get());
            }
            aggResultMap.put(key, value);
        }
    } catch (UnsupportedEncodingException e) {
        LOG.error(QueryConstants.CHARSET + " not support: " + e.getMessage(), e);
    }
    return aggResultMap;
}
 
Example 12, project: incubator-gobblin, file: MRStressTest.java
@Override
public void run() {
  DescriptiveStatistics stats = this.limiter.getRateStatsSinceLastReport();
  long now = System.currentTimeMillis();
  this.runs++;

  if (stats != null) {
    long key;
    if (this.relativeKey) {
      key = 15 * this.runs;
    } else {
      DateTime nowTime = new DateTime(now).withMillisOfSecond(0);
      DateTime rounded = nowTime.withSecondOfMinute(15 * (nowTime.getSecondOfMinute() / 15));
      key = rounded.getMillis() / 1000;
    }


    try {
      this.context.write(new LongWritable(key), new DoubleWritable(stats.getSum()));
    } catch (IOException | InterruptedException ioe) {
      log.error("Error: ", ioe);
    }
  }

}
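
For example, with relativeKey false and now falling at 10:07:38, getSecondOfMinute() returns 38, integer division gives 15 * (38 / 15) = 30, and key becomes the epoch-second timestamp of 10:07:30; the reported sums are thus bucketed into 15-second windows, matching the 15 * this.runs spacing of the relative-key branch.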
 
@SuppressWarnings("unchecked")
private<T> T convert(Record stratosphereType, int pos, Class<T> hadoopType) {
	if(hadoopType == LongWritable.class ) {
		return (T) new LongWritable((stratosphereType.getField(pos, LongValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.Text.class) {
		return (T) new Text((stratosphereType.getField(pos, StringValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.IntWritable.class) {
		return (T) new IntWritable((stratosphereType.getField(pos, IntValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.FloatWritable.class) {
		return (T) new FloatWritable((stratosphereType.getField(pos, FloatValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.DoubleWritable.class) {
		return (T) new DoubleWritable((stratosphereType.getField(pos, DoubleValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.BooleanWritable.class) {
		return (T) new BooleanWritable((stratosphereType.getField(pos, BooleanValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.ByteWritable.class) {
		return (T) new ByteWritable((stratosphereType.getField(pos, ByteValue.class)).getValue());
	}

	throw new RuntimeException("Unable to convert Stratosphere type ("+stratosphereType.getClass().getCanonicalName()+") to Hadoop.");
}
 
Example 14, project: distributed-graph-analytics, file: PageRankTest.java
@Test
public void testHighPageRankForOneNode() throws Exception {
    GiraphConfiguration conf = getConf();
    TestGraph<Text, DoubleWritable, Text> input = getHighPageRankGraph(conf);
    InMemoryVertexOutputFormat.initializeOutputGraph(conf);
    InternalVertexRunner.run(conf, input);
    TestGraph<Text, DoubleWritable, Text> output = InMemoryVertexOutputFormat.getOutputGraph();
    assertEquals(8, output.getVertices().size());
    assertTrue(output.getVertex(new Text("8")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("2")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("3")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("4")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("5")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("6")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("7")).getValue().get() < output.getVertex(new Text("1")).getValue().get());

}
 
Example 15

public TextEdgeWriter<Text, DoubleWritable, Text> createEdgeWriter(final RecordWriter<Text, Text> rw) {
    return new TDTEdgeWriter() {
        @Override
        protected RecordWriter<Text, Text> createLineRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
            return rw;
        }
    };
}
 
Example 16

private void aggregateQ(Double q) {
//		int aggregators = getNumQAggregators(getConf());
//		if (0 < aggregators) {
//			int modId = (int) this.getId().get() % aggregators;
			aggregate(ACTUAL_Q_AGG, new DoubleWritable(q));
//		}
	}
 
Example 17, project: pxf, file: RecordkeyAdapterTest.java
/**
 * Test convertKeyValue for Double type
 */
@Test
public void convertKeyValueDouble() {
    double key = 2.3;
    initRecordkeyAdapter();
    runConvertKeyValue(key, new DoubleWritable(key));
}
 
Example 18, project: WIFIProbe, file: CustomerFlowElement.java
public void write(DataOutput dataOutput) throws IOException {
    Text text = new Text(wifiProb==null?"":wifiProb);
    text.write(dataOutput);

    IntWritable intWritable = new IntWritable();

    intWritable.set(inNoOutWifi);
    intWritable.write(dataOutput);
    intWritable.set(inNoOutStore);
    intWritable.write(dataOutput);

    intWritable.set(outNoInWifi);
    intWritable.write(dataOutput);
    intWritable.set(outNoInStore);
    intWritable.write(dataOutput);

    intWritable.set(inAndOutWifi);
    intWritable.write(dataOutput);
    intWritable.set(inAndOutStore);
    intWritable.write(dataOutput);

    intWritable.set(stayInWifi);
    intWritable.write(dataOutput);
    intWritable.set(stayInStore);
    intWritable.write(dataOutput);

    DoubleWritable doubleWritable = new DoubleWritable();
    doubleWritable.set(jumpRate);
    doubleWritable.write(dataOutput);
    doubleWritable.set(deepVisit);
    doubleWritable.write(dataOutput);
    doubleWritable.set(inStoreRate);
    doubleWritable.write(dataOutput);
}
 
Example 19, project: WIFIProbe, file: CustomerFlowElement.java
public void readFields(DataInput dataInput) throws IOException {
    Text text = new Text();
    text.readFields(dataInput);
    wifiProb = text.toString();

    IntWritable intReader = new IntWritable();

    intReader.readFields(dataInput);
    inNoOutWifi = intReader.get();
    intReader.readFields(dataInput);
    inNoOutStore = intReader.get();

    intReader.readFields(dataInput);
    outNoInWifi = intReader.get();
    intReader.readFields(dataInput);
    outNoInStore = intReader.get();


    intReader.readFields(dataInput);
    inAndOutWifi = intReader.get();
    intReader.readFields(dataInput);
    inAndOutStore = intReader.get();

    intReader.readFields(dataInput);
    stayInWifi = intReader.get();
    intReader.readFields(dataInput);
    stayInStore = intReader.get();


    DoubleWritable doubleWritable = new DoubleWritable();
    doubleWritable.readFields(dataInput);
    jumpRate = doubleWritable.get();
    doubleWritable.readFields(dataInput);
    deepVisit = doubleWritable.get();
    doubleWritable.readFields(dataInput);
    inStoreRate = doubleWritable.get();

}
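
readFields must consume fields in exactly the order write emitted them; swapping any pair in either method silently corrupts every later field. A quick round-trip sanity check (our sketch, using Hadoop's in-memory buffers):

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

// Round-trip the Writable through in-memory buffers.
CustomerFlowElement written = new CustomerFlowElement();
// ... populate the fields of `written` here ...
DataOutputBuffer out = new DataOutputBuffer();
written.write(out);

DataInputBuffer in = new DataInputBuffer();
in.reset(out.getData(), out.getLength());
CustomerFlowElement restored = new CustomerFlowElement();
restored.readFields(in);
// restored should now match written, field for field.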
 
Example 20, project: hadoop, file: TypedBytesWritableInput.java
public Class<? extends Writable> readType() throws IOException {
  Type type = in.readType();
  if (type == null) {
    return null;
  }
  switch (type) {
  case BYTES:
    return BytesWritable.class;
  case BYTE:
    return ByteWritable.class;
  case BOOL:
    return BooleanWritable.class;
  case INT:
    return VIntWritable.class;
  case LONG:
    return VLongWritable.class;
  case FLOAT:
    return FloatWritable.class;
  case DOUBLE:
    return DoubleWritable.class;
  case STRING:
    return Text.class;
  case VECTOR:
    return ArrayWritable.class;
  case MAP:
    return MapWritable.class;
  case WRITABLE:
    return Writable.class;
  default:
    throw new RuntimeException("unknown type");
  }
}
 
Example 21, project: incubator-gobblin, file: MRStressTest.java
public static void main(String[] args) throws Exception {

    CommandLine cli = StressTestUtils.parseCommandLine(OPTIONS, args);

    Configuration configuration = new Configuration();
    if (cli.hasOption(THROTTLING_SERVER_URI.getOpt())) {
      configuration.setBoolean(USE_THROTTLING_SERVER, true);
      String resourceLimited = cli.getOptionValue(RESOURCE_ID_OPT.getOpt(), "MRStressTest");
      configuration.set(RESOURCE_ID, resourceLimited);
      configuration.set(
          BrokerConfigurationKeyGenerator.generateKey(new SharedRestClientFactory(),
              new SharedRestClientKey(RestliLimiterFactory.RESTLI_SERVICE_NAME),
              null, SharedRestClientFactory.SERVER_URI_KEY), cli.getOptionValue(THROTTLING_SERVER_URI.getOpt()));
    }

    if (cli.hasOption(LOCAL_QPS_OPT.getOpt())) {
      configuration.set(LOCALLY_ENFORCED_QPS, cli.getOptionValue(LOCAL_QPS_OPT.getOpt()));
    }

    Job job = Job.getInstance(configuration, "ThrottlingStressTest");
    job.getConfiguration().setBoolean("mapreduce.job.user.classpath.first", true);
    job.getConfiguration().setBoolean("mapreduce.map.speculative", false);

    job.getConfiguration().set(NUM_MAPPERS, cli.getOptionValue(NUM_MAPPERS_OPT.getOpt(), DEFAULT_MAPPERS));
    StressTestUtils.populateConfigFromCli(job.getConfiguration(), cli);

    job.setJarByClass(MRStressTest.class);
    job.setMapperClass(StresserMapper.class);
    job.setReducerClass(AggregatorReducer.class);
    job.setInputFormatClass(MyInputFormat.class);

    job.setOutputKeyClass(LongWritable.class);
    job.setOutputValueClass(DoubleWritable.class);
    FileOutputFormat.setOutputPath(job, new Path("/tmp/MRStressTest" + System.currentTimeMillis()));

    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
 
Example 22, project: hiped2, file: AvroParquetMapReduce.java
@Override
public void map(Void key,
                Stock value,
                Context context) throws IOException, InterruptedException {
  context.write(new Text(value.getSymbol().toString()),
      new DoubleWritable(value.getOpen()));
}
 
Example 23, project: hive-third-functions, file: UDFMathIsInfinite.java
public BooleanWritable evaluate(DoubleWritable num) {
    if (num == null) {
        result.set(false);
    } else {
        result.set(isInfinite(num.get()));
    }
    return result;
}
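
The result field lives outside this listing; Hive UDFs conventionally reuse a single output Writable across rows to avoid per-row allocation. The enclosing class presumably looks roughly like this (a sketch; only the method above is confirmed by the source):

import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.BooleanWritable;

public class UDFMathIsInfinite extends UDF {
    // Shared output instance, reused across rows instead of allocating per call.
    private final BooleanWritable result = new BooleanWritable();

    // public BooleanWritable evaluate(DoubleWritable num) { ... } as listed above
}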
 
Example 24

public DoubleWritable evaluate(double mean, double sd, double p) throws HiveException {
    checkCondition(p > 0 && p < 1, "p must satisfy 0 < p < 1");
    checkCondition(sd > 0, "sd must be > 0");

    result.set(mean + sd * 1.4142135623730951 * Erf.erfInv(2 * p - 1));
    return result;
}
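
The constant 1.4142135623730951 is the square root of 2, so the expression implements the normal quantile function (inverse CDF) written through the inverse error function:

    F^{-1}(p \mid \mu, \sigma) = \mu + \sigma \sqrt{2} \, \operatorname{erf}^{-1}(2p - 1), \qquad 0 < p < 1

which is exactly mean + sd * 1.4142135623730951 * Erf.erfInv(2 * p - 1) in the code above.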
 
Example 25, project: Kylin, file: DoubleSerializer.java
@Override
public DoubleWritable valueOf(byte[] value) {
    if (value == null)
        current.set(0d);
    else
        current.set(Double.parseDouble(Bytes.toString(value)));
    return current;
}
 
Example 26, project: hive-third-functions, file: UDFMathIsNaN.java
public BooleanWritable evaluate(DoubleWritable num) {
    if (num == null) {
        result.set(false);
    } else {
        result.set(isNaN(num.get()));
    }
    return result;
}
 
Example 27, project: IntroToHadoopAndMR__Udacity_Course, file: P1Q1.java
public final void map(final LongWritable key, final Text value, final Context context)
		throws IOException, InterruptedException {
	final String line = value.toString();
	final String[] data = line.trim().split("\t");
	if (data.length == 6) {
		final String product = data[3];
		final double sales = Double.parseDouble(data[4]);
		word.set(product);
		context.write(word, new DoubleWritable(sales));
	}
}
 
Example 28, project: marklogic-contentpump, file: InternalUtilities.java
/**
 * Create new XdmValue from value type and Writables.
 *  
 */
public static XdmValue newValue(ValueType valueType, Object value) {
    if (value instanceof Text) {
        return ValueFactory.newValue(valueType, ((Text)value).toString());
    } else if (value instanceof BytesWritable) {
        return ValueFactory.newValue(valueType, ((BytesWritable)value).getBytes());
    } else if (value instanceof IntWritable) {
        return ValueFactory.newValue(valueType, ((IntWritable)value).get());
    } else if (value instanceof LongWritable) {
        return ValueFactory.newValue(valueType, ((LongWritable)value).get());
    } else if (value instanceof VIntWritable) {
        return ValueFactory.newValue(valueType, ((VIntWritable)value).get());
    } else if (value instanceof VLongWritable) {
        return ValueFactory.newValue(valueType, ((VLongWritable)value).get());
    } else if (value instanceof BooleanWritable) {
        return ValueFactory.newValue(valueType, ((BooleanWritable)value).get());
    } else if (value instanceof FloatWritable) {
        return ValueFactory.newValue(valueType, ((FloatWritable)value).get());
    } else if (value instanceof DoubleWritable) {
        return ValueFactory.newValue(valueType, ((DoubleWritable)value).get());
    } else if (value instanceof MarkLogicNode) {
        return ValueFactory.newValue(valueType, ((MarkLogicNode)value).get());
    } else {
        throw new UnsupportedOperationException("Value " +  
                value.getClass().getName() + " is unsupported.");
    }
}
 
Example 29, project: big-c, file: TypedBytesWritableOutput.java
public void write(Writable w) throws IOException {
  if (w instanceof TypedBytesWritable) {
    writeTypedBytes((TypedBytesWritable) w);
  } else if (w instanceof BytesWritable) {
    writeBytes((BytesWritable) w);
  } else if (w instanceof ByteWritable) {
    writeByte((ByteWritable) w);
  } else if (w instanceof BooleanWritable) {
    writeBoolean((BooleanWritable) w);
  } else if (w instanceof IntWritable) {
    writeInt((IntWritable) w);
  } else if (w instanceof VIntWritable) {
    writeVInt((VIntWritable) w);
  } else if (w instanceof LongWritable) {
    writeLong((LongWritable) w);
  } else if (w instanceof VLongWritable) {
    writeVLong((VLongWritable) w);
  } else if (w instanceof FloatWritable) {
    writeFloat((FloatWritable) w);
  } else if (w instanceof DoubleWritable) {
    writeDouble((DoubleWritable) w);
  } else if (w instanceof Text) {
    writeText((Text) w);
  } else if (w instanceof ArrayWritable) {
    writeArray((ArrayWritable) w);
  } else if (w instanceof MapWritable) {
    writeMap((MapWritable) w);
  } else if (w instanceof SortedMapWritable) {
    writeSortedMap((SortedMapWritable) w);
  } else if (w instanceof Record) {
    writeRecord((Record) w);
  } else {
    writeWritable(w); // last resort
  }
}
 
Example 30, project: IntroToHadoopAndMR__Udacity_Course, file: P1Q2.java
public final void map(final LongWritable key, final Text value, final Context context)
		throws IOException, InterruptedException {
	final String line = value.toString();
	final String[] data = line.trim().split("\t");
	if (data.length == 6) {
		final String product = data[2];
		final double sales = Double.parseDouble(data[4]);
		word.set(product);
		context.write(word, new DoubleWritable(sales));
	}
}
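
Note that this mapper is identical to P1Q1 in example 27 except for the grouping column: it keys on data[2] rather than data[3], while still summing the sales value from data[4].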
 