首页 > 代码库 > HBase编程 API入门之delete.deleteColumn和delete.deleteColumns区别
HBase编程 API入门之delete.deleteColumn和delete.deleteColumns区别
delete.deleteColumn 和 delete.deleteColumns 的区别是:
deleteColumn 是删除指定列(列簇 + 列修饰符)的最新时间戳版本。
deleteColumns 是删除指定列(列簇 + 列修饰符)的所有时间戳版本。
hbase(main):020:0> desc 'test_table'
Table test_table is ENABLED
test_table
COLUMN FAMILIES DESCRIPTION
{NAME => ‘f‘, DATA_BLOCK_ENCODING => ‘NONE‘, BLOOMFILTER => ‘ROW‘, REPLICATION_SCOPE => ‘0‘, VERSIONS => ‘1‘, COMPRESSION => ‘NONE‘, MIN_VERSIONS => ‘0‘, TTL => ‘FOREVER‘, KEEP_DELETED_CELLS
=> ‘FALSE‘, BLOCKSIZE => ‘65536‘, IN_MEMORY => ‘false‘, BLOCKCACHE => ‘true‘}
1 row(s) in 0.2190 seconds
hbase(main):021:0> scan 'test_table'
ROW COLUMN+CELL
row_01 column=f:col, timestamp=1478102698687, value=http://www.mamicode.com/maizi
row_01 column=f:name, timestamp=1478104345828, value=http://www.mamicode.com/Andy
row_02 column=f:name, timestamp=1478104477628, value=http://www.mamicode.com/Andy2
row_03 column=f:name, timestamp=1478104823358, value=http://www.mamicode.com/Andy3
3 row(s) in 0.2270 seconds
hbase(main):022:0> scan 'test_table'
ROW COLUMN+CELL
row_01 column=f:col, timestamp=1478102698687, value=http://www.mamicode.com/maizi
row_01 column=f:name, timestamp=1478104345828, value=http://www.mamicode.com/Andy
row_02 column=f:name, timestamp=1478104477628, value=http://www.mamicode.com/Andy2
row_03 column=f:name, timestamp=1478104823358, value=http://www.mamicode.com/Andy3
3 row(s) in 0.1480 seconds
hbase(main):023:0> scan 'test_table',{VERSIONS=>3}
ROW COLUMN+CELL
row_01 column=f:col, timestamp=1478102698687, value=http://www.mamicode.com/maizi
row_01 column=f:name, timestamp=1478104345828, value=http://www.mamicode.com/Andy
row_02 column=f:name, timestamp=1478104477628, value=http://www.mamicode.com/Andy2
row_03 column=f:name, timestamp=1478104823358, value=http://www.mamicode.com/Andy3
3 row(s) in 0.1670 seconds
hbase(main):024:0>
package zhouls.bigdata.HbaseProject.Test1;
import javax.xml.transform.Result;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
/**
 * Tutorial demo: writes one cell into the HBase table "test_table" and
 * (in the commented-out sections) shows Get / Delete usage, including the
 * difference between {@code Delete.deleteColumn} (removes only the newest
 * timestamp version of a column) and {@code Delete.deleteColumns}
 * (removes all timestamp versions of a column).
 */
public class HBaseTest {

    /**
     * Puts row_04 / f:name = "Andy0" into test_table.
     *
     * @param args unused
     * @throws Exception on any HBase client failure (tutorial code keeps
     *                   error handling minimal)
     */
    public static void main(String[] args) throws Exception {
        HTable table = new HTable(getConfig(), TableName.valueOf("test_table")); // table name: test_table
        try {
            Put put = new Put(Bytes.toBytes("row_04")); // row key: row_04
            // column family "f", qualifier "name", value "Andy0"
            put.add(Bytes.toBytes("f"), Bytes.toBytes("name"), Bytes.toBytes("Andy0"));
            // put.add(Bytes.toBytes("f2"), Bytes.toBytes("name"), Bytes.toBytes("Andy3")); // family f2, qualifier name, value Andy3
            table.put(put);

            // Get get = new Get(Bytes.toBytes("row_04"));
            // get.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("age")); // if no column is specified, all columns are returned
            // org.apache.hadoop.hbase.client.Result rest = table.get(get);
            // System.out.println(rest.toString());

            // Delete delete = new Delete(Bytes.toBytes("row_2"));
            // delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("email"));
            // delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"));
            // table.delete(delete);

            // Delete delete = new Delete(Bytes.toBytes("row_03"));
            // delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));  // latest version only
            // delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name")); // all versions
            // table.delete(delete);
        } finally {
            table.close(); // always release the connection, even if put() fails
        }
    }

    /**
     * Builds the client configuration.
     *
     * <p>Uses {@link HBaseConfiguration#create()} (not {@code new Configuration()})
     * so that hbase-default.xml / hbase-site.xml defaults are loaded; a bare
     * Hadoop Configuration is missing the HBase-specific settings the client
     * needs.
     *
     * @return configuration pointing at the tutorial ZooKeeper quorum
     */
    public static Configuration getConfig() {
        Configuration configuration = HBaseConfiguration.create();
        // configuration.set("hbase.rootdir", "hdfs://HadoopMaster:9000/hbase");
        configuration.set("hbase.zookeeper.quorum",
                "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
        return configuration;
    }
}
hbase(main):038:0> scan 'test_table'
ROW COLUMN+CELL
row_01 column=f:col, timestamp=1478102698687, value=http://www.mamicode.com/maizi
row_01 column=f:name, timestamp=1478104345828, value=http://www.mamicode.com/Andy
row_02 column=f:name, timestamp=1478104477628, value=http://www.mamicode.com/Andy2
row_03 column=f:name, timestamp=1478123664884, value=http://www.mamicode.com/Andy3
3 row(s) in 0.1910 seconds
hbase(main):039:0> scan 'test_table'
ROW COLUMN+CELL
row_01 column=f:col, timestamp=1478102698687, value=http://www.mamicode.com/maizi
row_01 column=f:name, timestamp=1478104345828, value=http://www.mamicode.com/Andy
row_02 column=f:name, timestamp=1478104477628, value=http://www.mamicode.com/Andy2
row_03 column=f:name, timestamp=1478123664884, value=http://www.mamicode.com/Andy3
row_04 column=f:name, timestamp=1478123917775, value=http://www.mamicode.com/Andy0
4 row(s) in 0.1310 seconds
delete.deleteColumn 和 delete.deleteColumns 的区别是:
deleteColumn 是删除指定列(列簇 + 列修饰符)的最新时间戳版本。
deleteColumns 是删除指定列(列簇 + 列修饰符)的所有时间戳版本。
package zhouls.bigdata.HbaseProject.Test1;
import javax.xml.transform.Result;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
/**
 * Tutorial demo (second run): writes row_04 / f:name = "Andy1" into the
 * HBase table "test_table", overwriting the previous version of that cell.
 * The commented-out sections illustrate Get / Delete usage, including
 * {@code Delete.deleteColumn} (newest timestamp version only) versus
 * {@code Delete.deleteColumns} (all timestamp versions of the column).
 */
public class HBaseTest {

    /**
     * Puts row_04 / f:name = "Andy1" into test_table.
     *
     * @param args unused
     * @throws Exception on any HBase client failure (tutorial code keeps
     *                   error handling minimal)
     */
    public static void main(String[] args) throws Exception {
        HTable table = new HTable(getConfig(), TableName.valueOf("test_table")); // table name: test_table
        try {
            Put put = new Put(Bytes.toBytes("row_04")); // row key: row_04
            // column family "f", qualifier "name", value "Andy1"
            put.add(Bytes.toBytes("f"), Bytes.toBytes("name"), Bytes.toBytes("Andy1"));
            // put.add(Bytes.toBytes("f2"), Bytes.toBytes("name"), Bytes.toBytes("Andy3")); // family f2, qualifier name, value Andy3
            table.put(put);

            // Get get = new Get(Bytes.toBytes("row_04"));
            // get.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("age")); // if no column is specified, all columns are returned
            // org.apache.hadoop.hbase.client.Result rest = table.get(get);
            // System.out.println(rest.toString());

            // Delete delete = new Delete(Bytes.toBytes("row_2"));
            // delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("email"));
            // delete.deleteColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"));
            // table.delete(delete);

            // Delete delete = new Delete(Bytes.toBytes("row_03"));
            // delete.deleteColumn(Bytes.toBytes("f"), Bytes.toBytes("name"));  // latest version only
            // delete.deleteColumns(Bytes.toBytes("f"), Bytes.toBytes("name")); // all versions
            // table.delete(delete);
        } finally {
            table.close(); // always release the connection, even if put() fails
        }
    }

    /**
     * Builds the client configuration.
     *
     * <p>Uses {@link HBaseConfiguration#create()} (not {@code new Configuration()})
     * so that hbase-default.xml / hbase-site.xml defaults are loaded; a bare
     * Hadoop Configuration is missing the HBase-specific settings the client
     * needs.
     *
     * @return configuration pointing at the tutorial ZooKeeper quorum
     */
    public static Configuration getConfig() {
        Configuration configuration = HBaseConfiguration.create();
        // configuration.set("hbase.rootdir", "hdfs://HadoopMaster:9000/hbase");
        configuration.set("hbase.zookeeper.quorum",
                "HadoopMaster:2181,HadoopSlave1:2181,HadoopSlave2:2181");
        return configuration;
    }
}
delete.deleteColumn 和 delete.deleteColumns 的区别是:
deleteColumn 是删除指定列(列簇 + 列修饰符)的最新时间戳版本。
deleteColumns 是删除指定列(列簇 + 列修饰符)的所有时间戳版本。
HBase编程 API入门之delete.deleteColumn和delete.deleteColumns区别