org.apache.hadoop.hbase.Cell#getRowLength() Source Code Examples

Listed below are example usages of org.apache.hadoop.hbase.Cell#getRowLength() drawn from open-source projects; each example names the project and file so the full source can be viewed on GitHub.
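
Before working through the examples, note the pattern they all share: getRowLength() returns the row-key length (a short), and it is almost always paired with getRowArray() and getRowOffset() to address the row key inside the cell's backing array. The small helper class below is an illustrative sketch only, not taken from any of the projects listed:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class RowKeyAccess {
    // Copy the row key out of a Cell by hand, mirroring the pattern used in the examples below.
    static byte[] copyRow(Cell cell) {
        short rowLength = cell.getRowLength();   // length of the row key within getRowArray()
        byte[] row = new byte[rowLength];
        System.arraycopy(cell.getRowArray(), cell.getRowOffset(), row, 0, rowLength);
        return row;
    }

    // The same copy via the HBase convenience helper.
    static byte[] copyRowWithUtil(Cell cell) {
        return CellUtil.cloneRow(cell);
    }

    // Compare the row key against an expected byte[] without copying it out first.
    static boolean rowEquals(Cell cell, byte[] expectedRow) {
        return Bytes.compareTo(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
                expectedRow, 0, expectedRow.length) == 0;
    }
}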

Example 1  Project: phoenix   File: TupleProjector.java
public ProjectedValueTuple projectResults(Tuple tuple, boolean useNewValueQualifier) {
    long maxTS = tuple.getValue(0).getTimestamp();
    int nCells = tuple.size();
    for (int i = 1; i < nCells; i++) {
        long ts = tuple.getValue(i).getTimestamp();
        if (ts > maxTS) {
            maxTS = ts;
        }
    }
    byte[] bytesValue = schema.toBytes(tuple, getExpressions(), valueSet, ptr);
    Cell base = tuple.getValue(0);
    if (useNewValueQualifier) {
        return new ProjectedValueTuple(base.getRowArray(), base.getRowOffset(), base.getRowLength(), maxTS, bytesValue, 0, bytesValue.length, valueSet.getEstimatedLength());
    } else {
        return new OldProjectedValueTuple(base.getRowArray(), base.getRowOffset(), base.getRowLength(), maxTS, bytesValue, 0, bytesValue.length, valueSet.getEstimatedLength());
    }
}
 
Example 2  Project: Halyard   File: HalyardBulkDelete.java
@Override
protected void map(ImmutableBytesWritable key, Result value, Context output) throws IOException, InterruptedException {
    for (Cell c : value.rawCells()) {
        Statement st = HalyardTableUtils.parseStatement(c, SVF);
        if ((subj == null || subj.equals(st.getSubject())) && (pred == null || pred.equals(st.getPredicate())) && (obj == null || obj.equals(st.getObject())) && (ctx == null || ctx.contains(st.getContext()))) {
            KeyValue kv = new KeyValue(c.getRowArray(), c.getRowOffset(), (int) c.getRowLength(),
                c.getFamilyArray(), c.getFamilyOffset(), (int) c.getFamilyLength(),
                c.getQualifierArray(), c.getQualifierOffset(), c.getQualifierLength(),
                c.getTimestamp(), KeyValue.Type.DeleteColumn, c.getValueArray(), c.getValueOffset(),
                c.getValueLength(), c.getTagsArray(), c.getTagsOffset(), c.getTagsLength());
            output.write(new ImmutableBytesWritable(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength()), kv);
            deleted++;
        } else {
            output.progress();
        }
        if (total++ % 10000L == 0) {
            String msg = MessageFormat.format("{0} / {1} cells deleted", deleted, total);
            output.setStatus(msg);
            LOG.log(Level.INFO, msg);
        }
    }

}
 
Example 3  Project: hbase   File: HashTable.java
public void hashResult(Result result) {
  if (!batchStarted) {
    throw new RuntimeException("Cannot add to batch that has not been started.");
  }
  for (Cell cell : result.rawCells()) {
    int rowLength = cell.getRowLength();
    int familyLength = cell.getFamilyLength();
    int qualifierLength = cell.getQualifierLength();
    int valueLength = cell.getValueLength();
    digest.update(cell.getRowArray(), cell.getRowOffset(), rowLength);
    digest.update(cell.getFamilyArray(), cell.getFamilyOffset(), familyLength);
    digest.update(cell.getQualifierArray(), cell.getQualifierOffset(), qualifierLength);

    if (!ignoreTimestamps) {
      long ts = cell.getTimestamp();
      for (int i = 8; i > 0; i--) {
        digest.update((byte) ts);
        ts >>>= 8;
      }
    }
    digest.update(cell.getValueArray(), cell.getValueOffset(), valueLength);

    batchSize += rowLength + familyLength + qualifierLength + 8 + valueLength;
  }
}
 
Example 4  Project: hbase   File: IntegrationTestBigLinkedList.java
@Override
protected boolean filter(Context context, Cell cell) {
  // TODO: Can I do a better compare than this copying out key?
  byte [] row = new byte [cell.getRowLength()];
  System.arraycopy(cell.getRowArray(), cell.getRowOffset(), row, 0, cell.getRowLength());
  boolean b = this.keysToFind.contains(row);
  if (b) {
    String keyStr = Bytes.toStringBinary(row);
    try {
      LOG.info("Found cell=" + cell + " , walKey=" + context.getCurrentKey());
    } catch (IOException|InterruptedException e) {
      LOG.warn(e.toString(), e);
    }
    if (rows.addAndGet(1) < MISSING_ROWS_TO_LOG) {
      context.getCounter(FOUND_GROUP_KEY, keyStr).increment(1);
    }
    context.getCounter(FOUND_GROUP_KEY, "CELL_WITH_MISSING_ROW").increment(1);
  }
  return b;
}
 
Example 5  Project: hbase   File: IntegrationTestLoadAndVerify.java
@Override
protected boolean filter(Context context, Cell cell) {
  // TODO: Can I do a better compare than this copying out key?
  byte [] row = new byte [cell.getRowLength()];
  System.arraycopy(cell.getRowArray(), cell.getRowOffset(), row, 0, cell.getRowLength());
  boolean b = this.keysToFind.contains(row);
  if (b) {
    String keyStr = Bytes.toStringBinary(row);
    try {
      LOG.info("Found cell=" + cell + " , walKey=" + context.getCurrentKey());
    } catch (IOException|InterruptedException e) {
      LOG.warn(e.toString(), e);
    }
    if (rows.addAndGet(1) < MISSING_ROWS_TO_LOG) {
      context.getCounter(FOUND_GROUP_KEY, keyStr).increment(1);
    }
    context.getCounter(FOUND_GROUP_KEY, "CELL_WITH_MISSING_ROW").increment(1);
  }
  return b;
}
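
Examples 4 and 5 copy the row key out by hand before checking membership in keysToFind. A hedged sketch of the same check using CellUtil.cloneRow (keysToFind is assumed to be a byte-array-aware set, e.g. a TreeSet built with Bytes.BYTES_COMPARATOR as in the integration tests):

import java.util.Set;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;

final class RowMembership {
    // True if the cell's row key is present in the given set.
    // The set must compare byte[] contents, e.g. new TreeSet<>(Bytes.BYTES_COMPARATOR).
    static boolean containsRow(Set<byte[]> keysToFind, Cell cell) {
        return keysToFind.contains(CellUtil.cloneRow(cell));
    }
}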
 
Example 6  Project: phoenix   File: MetaDataUtil.java
public static void mutatePutValue(Put somePut, byte[] family, byte[] qualifier, byte[] newValue) {
    NavigableMap<byte[], List<Cell>> familyCellMap = somePut.getFamilyCellMap();
    List<Cell> cells = familyCellMap.get(family);
    List<Cell> newCells = Lists.newArrayList();
    if (cells != null) {
        for (Cell cell : cells) {
            if (Bytes.compareTo(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength(),
                qualifier, 0, qualifier.length) == 0) {
                Cell replacementCell = new KeyValue(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength(),
                    cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength(), cell.getQualifierArray(),
                    cell.getQualifierOffset(), cell.getQualifierLength(), cell.getTimestamp(),
                    KeyValue.Type.codeToType(cell.getType().getCode()), newValue, 0, newValue.length);
                newCells.add(replacementCell);
            } else {
                newCells.add(cell);
            }
        }
        familyCellMap.put(family, newCells);
    }
}
 
Example 7  Project: phoenix   File: IndexHalfStoreFileReader.java
private boolean isSatisfiedMidKeyCondition(Cell kv) {
    if (CellUtil.isDelete(kv) && kv.getValueLength() == 0) {
        // In case of a Delete type KV, let it be going to both the daughter regions.
        // No problems in doing so. In the correct daughter region where it belongs to, this delete
        // tomb will really delete a KV. In the other it will just hang around there with no actual
        // kv coming for which this is a delete tomb. :)
        return true;
    }
    ImmutableBytesWritable rowKey =
            new ImmutableBytesWritable(kv.getRowArray(), kv.getRowOffset() + offset,
                    kv.getRowLength() - offset);
    Entry<ImmutableBytesWritable, IndexMaintainer> entry = indexMaintainers.entrySet().iterator().next();
    IndexMaintainer indexMaintainer = entry.getValue();
    byte[] viewIndexId = indexMaintainer.getViewIndexIdFromIndexRowKey(rowKey);
    IndexMaintainer actualIndexMaintainer = indexMaintainers.get(new ImmutableBytesWritable(viewIndexId));
    byte[] dataRowKey = actualIndexMaintainer.buildDataRowKey(rowKey, this.viewConstants);
    int compareResult = Bytes.compareTo(dataRowKey, splitRow);
    if (top) {
        if (compareResult >= 0) {
            return true;
        }
    } else {
        if (compareResult < 0) {
            return true;
        }
    }
    return false;
}
 
Example 8  Project: phoenix   File: KeyValueUtil.java
/**
 * Binary search for latest column value without allocating memory in the process
 * @param kvBuilder TODO
 * @param kvs
 * @param family
 * @param qualifier
 */
public static Cell getColumnLatest(KeyValueBuilder kvBuilder, List<Cell> kvs, byte[] family, byte[] qualifier) {
    if (kvs.size() == 0) {
        return null;
    }
    Cell row = kvs.get(0);
    Comparator<Cell> comp = new SearchComparator(kvBuilder, 
      row.getRowArray(), row.getRowOffset(), row.getRowLength(), family, qualifier);
    // pos === ( -(insertion point) - 1)
    int pos = Collections.binarySearch(kvs, null, comp);
    // never will exact match
    if (pos < 0) {
      pos = (pos+1) * -1;
      // pos is now insertion point
    }
    if (pos == kvs.size()) {
      return null; // doesn't exist
    }

    Cell kv = kvs.get(pos);
    if (Bytes.compareTo(kv.getFamilyArray(), kv.getFamilyOffset(), kv.getFamilyLength(),
            family, 0, family.length) != 0) {
        return null;
    }
    if (Bytes.compareTo(kv.getQualifierArray(), kv.getQualifierOffset(), kv.getQualifierLength(),
            qualifier, 0, qualifier.length) != 0) {
        return null;
    }
    return kv;
}
 
Example 9
private boolean containedInScan(Cell kv) {
    byte[] rowArray = kv.getRowArray();
    int rowOffset = kv.getRowOffset();
    int rowLength = kv.getRowLength();
    if(Bytes.compareTo(scan.getStartRow(),0,scan.getStartRow().length,rowArray,rowOffset,rowLength)>0) return false;
    if(Bytes.compareTo(scan.getStopRow(),0,scan.getStopRow().length,rowArray,rowOffset,rowLength)<=0) return false;
    byte[] family = CellUtil.cloneFamily(kv);
    Map<byte[], NavigableSet<byte[]>> familyMap = scan.getFamilyMap();
    if(familyMap.size()<=0) return true;

    if(!familyMap.containsKey(family)) return false;
    NavigableSet<byte[]> qualifiersToFetch = familyMap.get(family);
    if(qualifiersToFetch.size()<=0) return true;
    return qualifiersToFetch.contains(CellUtil.cloneQualifier(kv));
}
 
Example 10  Project: hbase   File: CodecPerformance.java
static int getRoughSize(final Cell [] cells) {
  int size = 0;
  for (Cell c: cells) {
    size += c.getRowLength() + c.getFamilyLength() + c.getQualifierLength() + c.getValueLength();
    size += Bytes.SIZEOF_LONG + Bytes.SIZEOF_BYTE;
  }
  return size;
}
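
Example 10 approximates each cell's serialized size as the sum of the row, family, qualifier, and value lengths plus one long for the timestamp and one byte for the type. A small hedged check of that arithmetic against a hand-built KeyValue (the literals are made up for illustration):

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class RoughSizeCheck {
    public static void main(String[] args) {
        Cell c = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("cf"),
                Bytes.toBytes("q"), Bytes.toBytes("value"));
        // row (4) + family (2) + qualifier (1) + value (5) + timestamp (8) + type (1) = 21
        int rough = c.getRowLength() + c.getFamilyLength() + c.getQualifierLength()
                + c.getValueLength() + Bytes.SIZEOF_LONG + Bytes.SIZEOF_BYTE;
        System.out.println(rough);   // prints 21 for this cell
    }
}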
 
Example 11  Project: spliceengine   File: CellUtils.java
/**
 * Returns true if the specified KeyValue is contained by the specified range.
 */
public static boolean isKeyValueInRange(Cell kv, Pair<byte[], byte[]> range) {
    byte[] kvBuffer = kv.getRowArray(); // TODO JL SAR
    int rowKeyOffset = kv.getRowOffset();
    short rowKeyLength = kv.getRowLength();
    byte[] start = range.getFirst();
    byte[] stop = range.getSecond();
    return (start.length == 0 || Bytes.compareTo(start, 0, start.length, kvBuffer, rowKeyOffset, rowKeyLength) <= 0) &&
            (stop.length == 0 || Bytes.compareTo(stop, 0, stop.length, kvBuffer, rowKeyOffset, rowKeyLength) >= 0);
}
 
Example 12  Project: phoenix   File: LocalIndexStoreFileScanner.java
private byte[] getNewRowkeyByRegionStartKeyReplacedWithSplitKey(Cell kv, boolean changeBottomKeys) {
    int lenOfRemainingKey = kv.getRowLength() - reader.getOffset();
    byte[] keyReplacedStartKey = new byte[lenOfRemainingKey + reader.getSplitRow().length];
    System.arraycopy(changeBottomKeys ? new byte[reader.getSplitRow().length] : reader.getSplitRow(), 0,
            keyReplacedStartKey, 0, reader.getSplitRow().length);
    System.arraycopy(kv.getRowArray(), kv.getRowOffset() + reader.getOffset(), keyReplacedStartKey,
            reader.getSplitRow().length, lenOfRemainingKey);
    return keyReplacedStartKey;
}
 
Example 13  Project: geowave   File: HBaseDistributableFilter.java
protected ReturnCode applyFilter(final Cell cell) {
  final GeoWaveKeyImpl rowKey =
      new GeoWaveKeyImpl(
          cell.getRowArray(),
          partitionKeyLength,
          cell.getRowOffset(),
          cell.getRowLength());

  return applyFilter(rowKey);
}
 
Example 14  Project: spliceengine   File: CellByteBufferArrayUtils.java
public static boolean matchingRowKeyValue(Cell keyValue, Cell other) {
    return !(keyValue == null || other == null || keyValue.getRowLength() != other.getRowLength()) &&
            ArrayUtil.equals(CellUtils.getBuffer(keyValue), keyValue.getRowOffset(), CellUtils.getBuffer(other),
                             other.getRowOffset(), other.getRowLength());
}
 
Example 15  Project: phoenix   File: TupleProjector.java
public ProjectedValueTuple projectResults(Tuple tuple) {
    byte[] bytesValue = schema.toBytes(tuple, getExpressions(), valueSet, ptr);
    Cell base = tuple.getValue(0);
    return new ProjectedValueTuple(base.getRowArray(), base.getRowOffset(), base.getRowLength(), base.getTimestamp(), bytesValue, 0, bytesValue.length, valueSet.getEstimatedLength());
}
 
Example 16  Project: hbase   File: SecureWALCellCodec.java
@Override
public void write(Cell cell) throws IOException {
  if (encryptor == null) {
    super.write(cell);
    return;
  }

  byte[] iv = nextIv();
  encryptor.setIv(iv);
  encryptor.reset();

  // TODO: Check if this is a cell for an encrypted CF. If not, we can
  // write a 0 here to signal an unwrapped cell and just dump the KV bytes
  // afterward

  StreamUtils.writeRawVInt32(out, iv.length);
  out.write(iv);

  // TODO: Add support for WAL compression

  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  OutputStream cout = encryptor.createEncryptionStream(baos);
  ByteBufferWriterOutputStream bos = new ByteBufferWriterOutputStream(cout);
  int tlen = cell.getTagsLength();
  // Write the KeyValue infrastructure as VInts.
  StreamUtils.writeRawVInt32(bos, KeyValueUtil.keyLength(cell));
  StreamUtils.writeRawVInt32(bos, cell.getValueLength());
  // To support tags
  StreamUtils.writeRawVInt32(bos, tlen);

  // Write row, qualifier, and family
  short rowLength = cell.getRowLength();
  StreamUtils.writeRawVInt32(bos, rowLength);
  PrivateCellUtil.writeRow(bos, cell, rowLength);
  byte familyLength = cell.getFamilyLength();
  StreamUtils.writeRawVInt32(bos, familyLength);
  PrivateCellUtil.writeFamily(bos, cell, familyLength);
  int qualifierLength = cell.getQualifierLength();
  StreamUtils.writeRawVInt32(bos, qualifierLength);
  PrivateCellUtil.writeQualifier(bos, cell, qualifierLength);
  // Write the rest ie. ts, type, value and tags parts
  StreamUtils.writeLong(bos, cell.getTimestamp());
  bos.write(cell.getTypeByte());
  PrivateCellUtil.writeValue(bos, cell, cell.getValueLength());
  if (tlen > 0) {
    PrivateCellUtil.writeTags(bos, cell, tlen);
  }
  bos.close();

  StreamUtils.writeRawVInt32(out, baos.size());
  baos.writeTo(out);

  // Increment IV given the final payload length
  incrementIv(baos.size());
}
 
Example 17  Project: phoenix   File: SystemCatalogWALEntryFilter.java
private boolean isTenantRowCell(Cell cell) {
  ImmutableBytesWritable key =
      new ImmutableBytesWritable(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
  //rows in system.catalog that aren't tenant-owned will have a leading separator byte
  return key.get()[key.getOffset()] != QueryConstants.SEPARATOR_BYTE;
}
 
Example 18  Project: geowave   File: MergingServerOp.java
protected Cell mergeList(final List<Cell> cells) {
  synchronized (MUTEX) {
    Mergeable currentMergeable = null;
    final Cell firstCell = cells.get(0);
    for (final Cell cell : cells) {
      final Mergeable mergeable =
          getMergeable(
              cell,
              // TODO consider avoiding extra byte array
              // allocations (which would require
              // persistence utils to be able to use
              // bytebuffer instead of byte[])
              CellUtil.cloneValue(cell));
      if (mergeable != null) {
        if (currentMergeable == null) {
          currentMergeable = mergeable;
        } else {
          currentMergeable.merge(mergeable);
        }
      }
    }
    final byte[] valueBinary = getBinary(currentMergeable);
    // this is basically a lengthy verbose form of cloning
    // in-place (without allocating new byte arrays) and
    // simply replacing the value with the new mergeable
    // value
    return new KeyValue(
        firstCell.getRowArray(),
        firstCell.getRowOffset(),
        firstCell.getRowLength(),
        firstCell.getFamilyArray(),
        firstCell.getFamilyOffset(),
        firstCell.getFamilyLength(),
        firstCell.getQualifierArray(),
        firstCell.getQualifierOffset(),
        firstCell.getQualifierLength(),
        firstCell.getTimestamp(),
        Type.codeToType(firstCell.getTypeByte()),
        valueBinary,
        0,
        valueBinary.length,
        firstCell.getTagsArray(),
        firstCell.getTagsOffset(),
        firstCell.getTagsLength());
  }
}
 
Example 19  Project: hbase   File: DiffKeyDeltaEncoder.java
private int compressSingleKeyValue(DataOutputStream out, Cell cell, Cell prevCell)
    throws IOException {
  int flag = 0; // Do not use more bits that can fit into a byte
  int kLength = KeyValueUtil.keyLength(cell);
  int vLength = cell.getValueLength();

  long timestamp;
  long diffTimestamp = 0;
  int diffTimestampFitsInBytes = 0;
  int timestampFitsInBytes;
  int commonPrefix = 0;

  if (prevCell == null) {
    timestamp = cell.getTimestamp();
    if (timestamp < 0) {
      flag |= FLAG_TIMESTAMP_SIGN;
      timestamp = -timestamp;
    }
    timestampFitsInBytes = ByteBufferUtils.longFitsIn(timestamp);
    flag |= (timestampFitsInBytes - 1) << SHIFT_TIMESTAMP_LENGTH;
    // put column family
    byte familyLength = cell.getFamilyLength();
    out.write(familyLength);
    PrivateCellUtil.writeFamily(out, cell, familyLength);
  } else {
    // Finding common prefix
    int preKeyLength = KeyValueUtil.keyLength(prevCell);
    commonPrefix = PrivateCellUtil.findCommonPrefixInFlatKey(cell, prevCell, true, false);
    if (kLength == preKeyLength) {
      flag |= FLAG_SAME_KEY_LENGTH;
    }
    if (vLength == prevCell.getValueLength()) {
      flag |= FLAG_SAME_VALUE_LENGTH;
    }
    if (cell.getTypeByte() == prevCell.getTypeByte()) {
      flag |= FLAG_SAME_TYPE;
    }
    // don't compress timestamp and type using prefix encode timestamp
    timestamp = cell.getTimestamp();
    diffTimestamp = prevCell.getTimestamp() - timestamp;
    boolean negativeTimestamp = timestamp < 0;
    if (negativeTimestamp) {
      timestamp = -timestamp;
    }
    timestampFitsInBytes = ByteBufferUtils.longFitsIn(timestamp);
    boolean minusDiffTimestamp = diffTimestamp < 0;
    if (minusDiffTimestamp) {
      diffTimestamp = -diffTimestamp;
    }
    diffTimestampFitsInBytes = ByteBufferUtils.longFitsIn(diffTimestamp);
    if (diffTimestampFitsInBytes < timestampFitsInBytes) {
      flag |= (diffTimestampFitsInBytes - 1) << SHIFT_TIMESTAMP_LENGTH;
      flag |= FLAG_TIMESTAMP_IS_DIFF;
      if (minusDiffTimestamp) {
        flag |= FLAG_TIMESTAMP_SIGN;
      }
    } else {
      flag |= (timestampFitsInBytes - 1) << SHIFT_TIMESTAMP_LENGTH;
      if (negativeTimestamp) {
        flag |= FLAG_TIMESTAMP_SIGN;
      }
    }
  }
  out.write(flag);
  if ((flag & FLAG_SAME_KEY_LENGTH) == 0) {
    ByteBufferUtils.putCompressedInt(out, kLength);
  }
  if ((flag & FLAG_SAME_VALUE_LENGTH) == 0) {
    ByteBufferUtils.putCompressedInt(out, vLength);
  }
  ByteBufferUtils.putCompressedInt(out, commonPrefix);
  short rLen = cell.getRowLength();
  if (commonPrefix < rLen + KeyValue.ROW_LENGTH_SIZE) {
    // Previous and current rows are different. Copy the differing part of
    // the row, skip the column family, and copy the qualifier.
    PrivateCellUtil.writeRowKeyExcludingCommon(cell, rLen, commonPrefix, out);
    PrivateCellUtil.writeQualifier(out, cell, cell.getQualifierLength());
  } else {
    // The common part includes the whole row. As the column family is the
    // same across the whole file, it will automatically be included in the
    // common prefix, so we need not special-case it here.
    // What we write here is the non common part of the qualifier
    int commonQualPrefix = commonPrefix - (rLen + KeyValue.ROW_LENGTH_SIZE)
        - (cell.getFamilyLength() + KeyValue.FAMILY_LENGTH_SIZE);
    PrivateCellUtil.writeQualifierSkippingBytes(out, cell, cell.getQualifierLength(),
      commonQualPrefix);
  }
  if ((flag & FLAG_TIMESTAMP_IS_DIFF) == 0) {
    ByteBufferUtils.putLong(out, timestamp, timestampFitsInBytes);
  } else {
    ByteBufferUtils.putLong(out, diffTimestamp, diffTimestampFitsInBytes);
  }

  if ((flag & FLAG_SAME_TYPE) == 0) {
    out.write(cell.getTypeByte());
  }
  PrivateCellUtil.writeValue(out, cell, vLength);
  return kLength + vLength + KeyValue.KEYVALUE_INFRASTRUCTURE_SIZE;
}
 
Example 20  Project: hbase   File: PrefixKeyDeltaEncoder.java
private void writeKeyExcludingCommon(Cell cell, int commonPrefix, DataOutputStream out)
    throws IOException {
  short rLen = cell.getRowLength();
  if (commonPrefix < rLen + KeyValue.ROW_LENGTH_SIZE) {
    // Previous and current rows are different. Need to write the differing part followed by
    // cf,q,ts and type
    PrivateCellUtil.writeRowKeyExcludingCommon(cell, rLen, commonPrefix, out);
    byte fLen = cell.getFamilyLength();
    out.writeByte(fLen);
    PrivateCellUtil.writeFamily(out, cell, fLen);
    PrivateCellUtil.writeQualifier(out, cell, cell.getQualifierLength());
    out.writeLong(cell.getTimestamp());
    out.writeByte(cell.getTypeByte());
  } else {
    // The full row key part is common. CF part will be common for sure as we deal with Cells in
    // same family. Just need write the differing part in q, ts and type
    commonPrefix = commonPrefix - (rLen + KeyValue.ROW_LENGTH_SIZE)
        - (cell.getFamilyLength() + KeyValue.FAMILY_LENGTH_SIZE);
    int qLen = cell.getQualifierLength();
    int commonQualPrefix = Math.min(commonPrefix, qLen);
    int qualPartLenToWrite = qLen - commonQualPrefix;
    if (qualPartLenToWrite > 0) {
      PrivateCellUtil.writeQualifierSkippingBytes(out, cell, qLen, commonQualPrefix);
    }
    commonPrefix -= commonQualPrefix;
    // Common part in TS also?
    if (commonPrefix > 0) {
      int commonTimestampPrefix = Math.min(commonPrefix, KeyValue.TIMESTAMP_SIZE);
      if (commonTimestampPrefix < KeyValue.TIMESTAMP_SIZE) {
        byte[] curTsBuf = Bytes.toBytes(cell.getTimestamp());
        out.write(curTsBuf, commonTimestampPrefix, KeyValue.TIMESTAMP_SIZE
            - commonTimestampPrefix);
      }
      commonPrefix -= commonTimestampPrefix;
      if (commonPrefix == 0) {
        out.writeByte(cell.getTypeByte());
      }
    } else {
      out.writeLong(cell.getTimestamp());
      out.writeByte(cell.getTypeByte());
    }
  }
}