Source code examples for the class org.apache.hadoop.hbase.protobuf.generated.ClientProtos

The following examples show how to use the API of the class org.apache.hadoop.hbase.protobuf.generated.ClientProtos; you can also follow the project links to view the full source code on GitHub.
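
Nearly every example below converts between the HBase client objects (Scan, Result, Mutation) and their protobuf counterparts in ClientProtos via ProtobufUtil. As orientation, here is a minimal round-trip sketch (the class and method names are illustrative only, not from any of the projects below):

import java.io.IOException;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

public class ScanRoundTrip {
  public static Scan roundTrip(Scan scan) throws IOException {
    // Scan -> protobuf -> bytes: the direction most writers below take
    ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
    byte[] wire = proto.toByteArray();
    // bytes -> protobuf -> Scan: what the matching readers reconstruct
    return ProtobufUtil.toScan(ClientProtos.Scan.parseFrom(wire));
  }
}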

Example 1  Project: phoenix  File: KeyValueCodec.java
/**
 * Read a single {@link KeyValue} from the input stream - may either be a regular {@link KeyValue}
 * or an {@link IndexedKeyValue}.
 * @param in to read from
 * @return the next {@link KeyValue}, if one is available
 * @throws IOException if the next {@link KeyValue} cannot be read
 */
public static KeyValue readKeyValue(DataInput in) throws IOException {
  int length = in.readInt();
  // it's a special IndexedKeyValue
  if (length == INDEX_TYPE_LENGTH_MARKER) {
    ImmutableBytesPtr indexTableName = new ImmutableBytesPtr(Bytes.readByteArray(in));
    byte[] mutationData = Bytes.readByteArray(in);
    ClientProtos.MutationProto mProto = ClientProtos.MutationProto.parseFrom(mutationData);
    Mutation mutation = org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(mProto);
    IndexedKeyValue kv = null;
    if (mutation != null){
      kv = IndexedKeyValue.newIndexedKeyValue(indexTableName.copyBytesIfNecessary(), mutation);
    } else {
      kv = new IndexedKeyValue();
    }
    return kv;
  } else {
    return KeyValue.create(length, in);
  }
}
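
The write path that produces this format is not shown here; below is a hedged sketch of what it presumably looks like, reusing the marker constant and field order from the reader above (the method name and the PUT mutation type are assumptions):

// Hedged sketch of the write side matching readKeyValue above; assumes the
// same INDEX_TYPE_LENGTH_MARKER constant is in scope and a PUT-typed mutation.
public static void writeIndexedKeyValue(DataOutput out, byte[] indexTableName,
    Mutation mutation) throws IOException {
  out.writeInt(INDEX_TYPE_LENGTH_MARKER);           // signals an IndexedKeyValue
  Bytes.writeByteArray(out, indexTableName);        // index table name
  ClientProtos.MutationProto mProto = org.apache.hadoop.hbase.protobuf.ProtobufUtil
      .toMutation(ClientProtos.MutationProto.MutationType.PUT, mutation);
  Bytes.writeByteArray(out, mProto.toByteArray());  // serialized mutation
}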
 
Example 2  Project: phoenix  File: PhoenixInputSplit.java
@Override
public void readFields(DataInput input) throws IOException {
    int count = WritableUtils.readVInt(input);
    scans = Lists.newArrayListWithExpectedSize(count);
    for (int i = 0; i < count; i++) {
        byte[] protoScanBytes = new byte[WritableUtils.readVInt(input)];
        input.readFully(protoScanBytes);
        ClientProtos.Scan protoScan = ClientProtos.Scan.parseFrom(protoScanBytes);
        Scan scan = ProtobufUtil.toScan(protoScan);
        scans.add(scan);
    }
    init();
}
 
Example 3  Project: phoenix  File: PhoenixInputSplit.java
@Override
public void write(DataOutput output) throws IOException {
    Preconditions.checkNotNull(scans);
    WritableUtils.writeVInt(output, scans.size());
    for (Scan scan : scans) {
        ClientProtos.Scan protoScan = ProtobufUtil.toScan(scan);
        byte[] protoScanBytes = protoScan.toByteArray();
        WritableUtils.writeVInt(output, protoScanBytes.length);
        output.write(protoScanBytes);
    }
}
 
Example 4  Project: phoenix  File: PhoenixInputSplit.java
@Override
public void readFields(DataInput input) throws IOException {
    regionLocation = WritableUtils.readString(input);
    regionSize = WritableUtils.readVLong(input);
    int count = WritableUtils.readVInt(input);
    scans = Lists.newArrayListWithExpectedSize(count);
    for (int i = 0; i < count; i++) {
        byte[] protoScanBytes = new byte[WritableUtils.readVInt(input)];
        input.readFully(protoScanBytes);
        ClientProtos.Scan protoScan = ClientProtos.Scan.parseFrom(protoScanBytes);
        Scan scan = ProtobufUtil.toScan(protoScan);
        scans.add(scan);
    }
    init();
}
 
Example 5  Project: phoenix  File: PhoenixInputSplit.java
@Override
public void write(DataOutput output) throws IOException {
    WritableUtils.writeString(output, regionLocation);
    WritableUtils.writeVLong(output, regionSize);

    Preconditions.checkNotNull(scans);
    WritableUtils.writeVInt(output, scans.size());
    for (Scan scan : scans) {
        ClientProtos.Scan protoScan = ProtobufUtil.toScan(scan);
        byte[] protoScanBytes = protoScan.toByteArray();
        WritableUtils.writeVInt(output, protoScanBytes.length);
        output.write(protoScanBytes);
    }
}
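
Because PhoenixInputSplit follows the standard Hadoop Writable contract, the read/write pairs above can be sanity-checked with the usual in-memory round trip; a sketch (the no-arg constructor is an assumption):

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

static PhoenixInputSplit roundTrip(PhoenixInputSplit split) throws IOException {
  // Serialize the split exactly as the MapReduce framework would.
  DataOutputBuffer out = new DataOutputBuffer();
  split.write(out);
  // Read it back into a fresh instance.
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  PhoenixInputSplit copy = new PhoenixInputSplit(); // no-arg constructor assumed
  copy.readFields(in);
  return copy;
}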
 
Example 6  Project: pentaho-hadoop-shims  File: HadoopShim.java
@Override
public Class[] getHbaseDependencyClasses() {
  return new Class[] {
    HConstants.class, ClientProtos.class, Put.class, CompatibilityFactory.class, TableMapper.class,
    ZooKeeper.class, Channel.class, Message.class, Lists.class, Trace.class, MetricsRegistry.class
  };
}
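
Arrays like this are typically handed to TableMapReduceUtil.addDependencyJars so that the jar containing each class (ClientProtos included) ships with the MapReduce job; a minimal usage sketch (the wrapper method is illustrative):

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;

static void shipHbaseJars(Configuration conf, Class<?>[] deps) throws IOException {
  // Resolves each class to its containing jar and adds it to the job's
  // tmpjars, so ClientProtos & co. are on the classpath of every task.
  TableMapReduceUtil.addDependencyJars(conf, deps);
}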
 
Example 7  Project: pentaho-hadoop-shims  File: HadoopShim.java
public Class[] getHbaseDependencyClasses() {
  return new Class[] {
    HConstants.class, ClientProtos.class, Put.class, CompatibilityFactory.class,
    JobUtil.class, TableMapper.class, FastLongHistogram.class, Snapshot.class,
    ZooKeeper.class, Channel.class, Message.class, Lists.class, Trace.class, MetricsRegistry.class
  };
}
 
Example 8  Project: pentaho-hadoop-shims  File: HadoopShim.java
@Override
public Class[] getHbaseDependencyClasses() {
  return new Class[] {
    HConstants.class, ClientProtos.class, Put.class, CompatibilityFactory.class, TableMapper.class,
    ZooKeeper.class, Channel.class, Message.class, Lists.class, Trace.class, MetricsRegistry.class
  };
}
 
Example 9  Project: pentaho-hadoop-shims  File: HadoopShim.java
@Override
public Class[] getHbaseDependencyClasses() {
  return new Class[]{
    HConstants.class, ClientProtos.class, Put.class, CompatibilityFactory.class, TableMapper.class,
    ZooKeeper.class, Channel.class, Message.class, Lists.class, Trace.class, MetricsRegistry.class
  };
}
 
Example 10  Project: spliceengine  File: HOperationFactory.java
@Override
public DataScan readScan(ObjectInput in) throws IOException{
    byte[] bytes = new byte[in.readInt()];
    in.readFully(bytes);
    ClientProtos.Scan scan=ClientProtos.Scan.parseFrom(bytes);
    return new HScan(ProtobufUtil.toScan(scan));
}
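
The matching write side is not part of this catalog; presumably it mirrors the reader by length-prefixing the serialized protobuf, roughly as in this hedged sketch:

// Hedged sketch of the write counterpart to readScan above.
public void writeScan(ObjectOutput out, Scan scan) throws IOException {
  byte[] bytes = ProtobufUtil.toScan(scan).toByteArray();
  out.writeInt(bytes.length);  // length prefix read back by in.readInt()
  out.write(bytes);
}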
 
Example 11  Project: spliceengine  File: TableScannerBuilderTest.java
@Override
protected DataScan readScan(ObjectInput in) throws IOException{
    byte[] bytes = new byte[in.readInt()];
    in.readFully(bytes);
    ClientProtos.Scan scan=ClientProtos.Scan.parseFrom(bytes);
    return new HScan(ProtobufUtil.toScan(scan));
}
 
Example 12  Project: super-cloudops  File: HfileBulkExporter.java
/**
 * Sets up the scan condition on the job configuration if the relevant
 * command-line options were supplied.
 *
 * @param conf the job configuration to populate
 * @param line the parsed command-line options
 * @throws IOException when serializing the scan fails
 */
public static void setScanIfNecessary(Configuration conf, CommandLine line) throws IOException {
	String startRow = line.getOptionValue("startRow");
	String endRow = line.getOptionValue("endRow");
	String startTime = line.getOptionValue("startTime");
	String endTime = line.getOptionValue("endTime");

	boolean enabledScan = false;
	Scan scan = new Scan();
	// Row
	if (isNotBlank(startRow)) {
		conf.set(TableInputFormat.SCAN_ROW_START, startRow);
		scan.setStartRow(Bytes.toBytes(startRow));
		enabledScan = true;
	}
	if (isNotBlank(endRow)) {
		Assert2.hasText(startRow, "startRow must also be set when endRow is specified");
		conf.set(TableInputFormat.SCAN_ROW_STOP, endRow);
		scan.setStopRow(Bytes.toBytes(endRow));
		enabledScan = true;
	}

	// Row TimeStamp
	if (isNotBlank(startTime) && isNotBlank(endTime)) {
		conf.set(TableInputFormat.SCAN_TIMERANGE_START, startTime);
		conf.set(TableInputFormat.SCAN_TIMERANGE_END, endTime);
		try {
			Timestamp stime = new Timestamp(Long.parseLong(startTime));
			Timestamp etime = new Timestamp(Long.parseLong(endTime));
			scan.setTimeRange(stime.getTime(), etime.getTime());
			enabledScan = true;
		} catch (Exception e) {
			throw new IllegalArgumentException(String.format("Illegal startTime(%s) and endTime(%s)", startTime, endTime), e);
		}
	}

	if (enabledScan) {
		ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
		log.info("All other SCAN configuration is ignored once TableInputFormat.SCAN is set; "
				+ "see TableMapReduceUtil.convertScanToString(Scan) for details.");
		conf.set(TableInputFormat.SCAN, Base64.encodeBytes(proto.toByteArray()));
	}
}
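
On the consuming side, the Base64 string stored under TableInputFormat.SCAN can be decoded back into a Scan; a hedged sketch of that inverse step (the helper method is illustrative):

static Scan restoreScan(Configuration conf) throws IOException {
  // Inverse of conf.set(TableInputFormat.SCAN, ...) above; uses the same
  // org.apache.hadoop.hbase.util.Base64 helper as the writer.
  String base64 = conf.get(TableInputFormat.SCAN);
  ClientProtos.Scan proto = ClientProtos.Scan.parseFrom(Base64.decode(base64));
  return ProtobufUtil.toScan(proto);
}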
 
Example 13  Project: beam  File: HBaseIO.java
private void readObject(ObjectInputStream in) throws IOException {
  this.configuration = SerializableCoder.of(SerializableConfiguration.class).decode(in).get();
  this.tableId = StringUtf8Coder.of().decode(in);
  this.scan = ProtobufUtil.toScan(ClientProtos.Scan.parseDelimitedFrom(in));
}
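
The corresponding writeObject is not shown; it would have to emit the same three fields in the same order, using the delimited protobuf form so that parseDelimitedFrom can find the message boundary. A sketch under those assumptions:

// Hedged sketch of the matching writeObject: same fields, same order,
// delimited protobuf so parseDelimitedFrom(in) can locate the boundary.
private void writeObject(ObjectOutputStream out) throws IOException {
  SerializableCoder.of(SerializableConfiguration.class)
      .encode(new SerializableConfiguration(configuration), out);
  StringUtf8Coder.of().encode(tableId, out);
  ProtobufUtil.toScan(scan).writeDelimitedTo(out);
}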
 
Example 14  Project: beam  File: HBaseResultCoder.java
@Override
public Result decode(InputStream inputStream) throws IOException {
  return ProtobufUtil.toResult(ClientProtos.Result.parseDelimitedFrom(inputStream));
}
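
The matching encode presumably converts the client Result to its protobuf form and writes it with the same delimited framing; a hedged sketch:

@Override
public void encode(Result value, OutputStream outputStream) throws IOException {
  // Delimited form pairs with parseDelimitedFrom(...) in decode above.
  ProtobufUtil.toResult(value).writeDelimitedTo(outputStream);
}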
 
Example 15  Project: Eagle  File: ProtoBufConverter.java
public static ClientProtos.Scan toPBScan(Scan scan) throws IOException {
    return ProtobufUtil.toScan(scan);
}
 
Example 16  Project: eagle  File: ProtoBufConverter.java
public static Scan fromPBScan(ClientProtos.Scan scan) throws IOException {
    return ProtobufUtil.toScan(scan);
}
 
Example 17  Project: eagle  File: ProtoBufConverter.java
public static ClientProtos.Scan toPBScan(Scan scan) throws IOException {
    return ProtobufUtil.toScan(scan);
}
 
Example 18  Project: hgraphdb  File: IndexTool.java
/**
 * Writes the given scan into a Base64 encoded string.
 *
 * @param scan  The scan to write out.
 * @return The scan saved in a Base64 encoded string.
 * @throws IOException When writing the scan fails.
 */
static String convertScanToString(Scan scan) throws IOException {
    ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
    return Base64.encodeBytes(proto.toByteArray());
}
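
In job setup code, the resulting string is typically stored under TableInputFormat.SCAN, where TableInputFormat picks it up; a usage sketch (convertScanToString is the helper above, the wrapper is illustrative):

static void configureScan(Configuration conf, Scan scan) throws IOException {
  // TableInputFormat reads the serialized scan back from this key.
  conf.set(TableInputFormat.SCAN, convertScanToString(scan));
}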
 
Example 19  Project: Eagle  File: ProtoBufConverter.java
/**
 * Converts a protobuf scan back into a client-side {@link Scan}.
 *
 * @param scan the protobuf scan to convert
 * @return the equivalent client-side scan
 * @throws IOException when the conversion fails
 */
public static Scan fromPBScan(ClientProtos.Scan scan) throws IOException {
    return ProtobufUtil.toScan(scan);
}
 
Example 20  Project: spliceengine  File: SpliceTableMapReduceUtil.java
/**
 * Converts the given Base64 string back into a Scan instance.
 *
 * @param base64  The scan details.
 * @return The newly created Scan instance.
 * @throws IOException When reading the scan instance fails.
 */
public static Scan convertStringToScan(String base64) throws IOException{
    byte[] bytes= Base64.getDecoder().decode(base64);
    ClientProtos.Scan scan=ClientProtos.Scan.parseFrom(bytes);
    return ProtobufUtil.toScan(scan);
}
 