org.apache.hadoop.hbase.client.Delete#deleteFamily() source code examples

The following examples show how org.apache.hadoop.hbase.client.Delete#deleteFamily() is used in open-source projects; the full source files can be viewed on GitHub.
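
The examples on this page use the older HBase client API (deleteFamily was deprecated in HBase 1.0 and removed in 2.0 in favor of Delete#addFamily). As a minimal, self-contained sketch of the call against that older client, where the table name "demo_table", row key "row-1" and column family "cf" are placeholders:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteFamilyExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // "demo_table", "row-1" and "cf" are placeholder names for this sketch.
        HTable table = new HTable(conf, "demo_table");
        try {
            Delete delete = new Delete(Bytes.toBytes("row-1"));
            // Mark every cell of column family "cf" in this row for deletion.
            delete.deleteFamily(Bytes.toBytes("cf"));
            table.delete(delete);
        } finally {
            table.close();
        }
    }
}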

Example 1   Project: Eagle   File: HBaseLogDeleter.java
private Delete createDelete(byte[] row) throws IOException{
	Delete delete = new Delete(row);
	delete.deleteFamily(columnFamily.getBytes());
	return delete;
}
 
Example 2   Project: eagle   File: HBaseLogDeleter.java
private Delete createDelete(byte[] row) throws IOException {
    Delete delete = new Delete(row);
    delete.deleteFamily(columnFamily.getBytes());
    return delete;
}
 
Example 3
protected void doReconstructionLog(final Path oldCoreLogFile, final long minSeqId, final long maxSeqId,
        final Progressable reporter) throws UnsupportedEncodingException, IOException {

    Path trxPath = new Path(oldCoreLogFile.getParent(), THLog.HREGION_OLD_THLOGFILE_NAME);

    // We can ignore doing anything with the Trx Log table, it is
    // not-transactional.
    if (super.getTableDesc().getNameAsString().equals(HBaseBackedTransactionLogger.TABLE_NAME)) {
        return;
    }

    THLogRecoveryManager recoveryManager = new THLogRecoveryManager(this);
    Map<Long, WALEdit> commitedTransactionsById = recoveryManager.getCommitsFromLog(trxPath, minSeqId, reporter);

    if (commitedTransactionsById != null && commitedTransactionsById.size() > 0) {
        LOG.debug("found " + commitedTransactionsById.size() + " COMMITED transactions to recover.");

        for (Entry<Long, WALEdit> entry : commitedTransactionsById.entrySet()) {
            LOG.debug("Writing " + entry.getValue().size() + " updates for transaction " + entry.getKey());
            WALEdit b = entry.getValue();

            for (KeyValue kv : b.getKeyValues()) {
                // FIXME need to convert these into puts and deletes. Not sure this is
                // the right way.
                // Could probably combine multiple KV's into single put/delete.
                // Also timestamps?
                if (kv.getType() == KeyValue.Type.Put.getCode()) {
                    Put put = new Put();
                    put.add(kv);
                    super.put(put);
                } else if (kv.isDelete()) {
                    Delete del = new Delete(kv.getRow());
                    if (kv.isDeleteFamily()) {
                        del.deleteFamily(kv.getFamily());
                    } else if (kv.isDeleteType()) {
                        del.deleteColumn(kv.getFamily(), kv.getQualifier());
                    }
                }

            }

        }

        LOG.debug("Flushing cache"); // We must trigger a cache flush,
        // otherwise we will would ignore the log on subsequent failure
        if (!super.flushcache()) {
            LOG.warn("Did not flush cache");
        }
    }
}
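
The loop above rebuilds Puts and Deletes from KeyValues recovered out of the transaction log. Below is a hedged, standalone sketch of that same dispatch against the old-generation KeyValue API the example uses; the class name WalEditReplayHelper and the method toMutation are invented for illustration, and unlike the loop above this version seeds the Put with the KeyValue's row and returns the mutation so the caller decides how to apply or batch it:

import java.io.IOException;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;

public final class WalEditReplayHelper {

    // Maps a single recovered KeyValue back to the client mutation it came from.
    // Mirrors the dispatch in doReconstructionLog above, but returns the mutation
    // instead of writing it to the region.
    public static Mutation toMutation(KeyValue kv) throws IOException {
        if (kv.getType() == KeyValue.Type.Put.getCode()) {
            Put put = new Put(kv.getRow());
            put.add(kv);    // re-attach the original cell, timestamp included
            return put;
        }
        if (kv.isDelete()) {
            Delete del = new Delete(kv.getRow());
            if (kv.isDeleteFamily()) {
                del.deleteFamily(kv.getFamily());                     // whole-family tombstone
            } else if (kv.isDeleteType()) {
                del.deleteColumn(kv.getFamily(), kv.getQualifier());  // single-cell tombstone
            }
            return del;
        }
        return null; // unexpected cell type; caller may log and skip it
    }
}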
 
Example 4   Project: bigdata-tutorial   File: HBaseUtils.java
/**
 * Delete one column family of a row, identified by its row key.
 *
 * @param table  the HBase table handle
 * @param rowKey the row key of the row to modify
 * @param family the column family to delete
 * @throws Exception
 */
public static void deleteFamily(HTableInterface table, String rowKey, String family) throws Exception {
	Delete delete = new Delete(Bytes.toBytes(rowKey));
	delete.deleteFamily(Bytes.toBytes(family));
	table.delete(delete);
	LOGGER.info(">>>> HBase Delete {} data with key = {}, columnFamily = {}.", new String(table.getTableName()), rowKey, family);
}
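
A possible usage sketch of the helper above, assuming an HBase 0.96/0.98-era client where an HTableInterface is obtained from an HConnection; the table, row and family names are placeholders, and HBaseUtils refers to the class shown in Example 4:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HConnection;
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.client.HTableInterface;

public class DeleteFamilyUsage {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        HConnection connection = HConnectionManager.createConnection(conf);
        // "demo_table", "row-1" and "cf" are placeholder names for this sketch.
        HTableInterface table = connection.getTable("demo_table");
        try {
            HBaseUtils.deleteFamily(table, "row-1", "cf");
        } finally {
            table.close();
            connection.close();
        }
    }
}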
 
Example 5   Project: bigdata-tutorial   File: HBaseDMLHandler.java
/**
 * Delete one column family of a row, identified by table name and row key.
 *
 * @param tableName the name of the HBase table
 * @param rowKey    the row key of the row to modify
 * @param family    the column family to delete
 * @throws Exception
 */
public void deleteFamily(String tableName, String rowKey, String family) throws Exception {
	HTableInterface htable = getTable(tableName);
	Delete delete = new Delete(Bytes.toBytes(rowKey));
	delete.deleteFamily(Bytes.toBytes(family));
	htable.delete(delete);
}
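
As noted above, deleteFamily is no longer available in HBase 2.x clients; the equivalent of HBaseDMLHandler#deleteFamily against the newer Connection/Table API looks roughly like the following sketch (the class name and the placeholder names are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class ModernDeleteFamily {

    // Same operation as HBaseDMLHandler#deleteFamily above, but using
    // Delete#addFamily, which replaces deleteFamily in HBase 1.x+/2.x clients.
    public static void deleteFamily(Connection connection, String tableName,
                                    String rowKey, String family) throws Exception {
        try (Table table = connection.getTable(TableName.valueOf(tableName))) {
            Delete delete = new Delete(Bytes.toBytes(rowKey));
            delete.addFamily(Bytes.toBytes(family));
            table.delete(delete);
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // "demo_table", "row-1" and "cf" are placeholder names for this sketch.
        try (Connection connection = ConnectionFactory.createConnection(conf)) {
            deleteFamily(connection, "demo_table", "row-1", "cf");
        }
    }
}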