HBase Development
TRANSCRIPT
![Page 1: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/1.jpg)
The term planet-size web application comes to mind, and in this case it is fitting.
Writing Code in
![Page 2: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/2.jpg)
lib
commons-configuration-1.8.jar
commons-lang-2.6.jar
commons-logging-1.1.1.jar
hadoop-core-1.0.0.jar
hbase-0.92.1.jar
log4j-1.2.16.jar
slf4j-api-1.5.8.jar
slf4j-log4j12-1.5.8.jar
zookeeper-3.4.3.jar
![Page 3: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/3.jpg)
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
![Page 4: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/4.jpg)
// Build an HBase client configuration on top of an existing Hadoop Configuration,
// point it at the ZooKeeper ensemble, and open a handle to the target table.
Configuration hConf = HBaseConfiguration.create(conf);
// ZooKeeper quorum hosts and client port are the two settings every HBase
// client needs in order to locate the cluster.
hConf.set(Constants.HBASE_CONFIGURATION_ZOOKEEPER_QUORUM, hbaseZookeeperQuorum);
hConf.set(Constants.HBASE_CONFIGURATION_ZOOKEEPER_CLIENTPORT, hbaseZookeeperClientPort);
HTable hTable = new HTable(hConf, tableName);
![Page 5: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/5.jpg)
public class HBaseTest {
    /**
     * Shared client configuration, built once at class-load time.
     * HBaseConfiguration.create() reads hbase-site.xml / hbase-default.xml
     * from the classpath.
     */
    private static Configuration conf = HBaseConfiguration.create();
}
![Page 6: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/6.jpg)
/**
 * Creates a table with the given column families, unless it already exists.
 * (Method name keeps the original "creatTable" spelling for compatibility
 * with existing callers.)
 *
 * @param tableName name of the table to create
 * @param familys   column-family names to add to the new table
 * @throws Exception if the master is unreachable or table creation fails
 */
public static void creatTable(String tableName, String[] familys)
        throws Exception {
    HBaseAdmin admin = new HBaseAdmin(conf);
    if (admin.tableExists(tableName)) {
        System.out.println("table already exists!");
    } else {
        HTableDescriptor tableDesc = new HTableDescriptor(tableName);
        // One descriptor per requested column family.
        for (String family : familys) {
            tableDesc.addFamily(new HColumnDescriptor(family));
        }
        admin.createTable(tableDesc);
        System.out.println("create table " + tableName + " ok.");
    }
}
![Page 7: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/7.jpg)
/**
* Delete a table
*/
public static void deleteTable(String tableName) throws Exception {
try {
HBaseAdmin admin = new HBaseAdmin(conf);
admin.disableTable(tableName);
admin.deleteTable(tableName);
System.out.println("delete table " + tableName + " ok.");
} catch (MasterNotRunningException e) {
e.printStackTrace();
} catch (ZooKeeperConnectionException e) {
e.printStackTrace();
}
}
![Page 8: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/8.jpg)
/**
* Put (or insert) a row
*/
public static void addRecord(String tableName, String rowKey,
String family, String qualifier, String value) throws Exception {
try {
HTable table = new HTable(conf, tableName);
Put put = new Put(Bytes.toBytes(rowKey));
put.add(Bytes.toBytes(family), Bytes.toBytes(qualifier), Bytes
.toBytes(value));
table.put(put);
System.out.println("insert recored " + rowKey + " to table "
+ tableName + " ok.");
} catch (IOException e) {
e.printStackTrace();
}
}
![Page 9: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/9.jpg)
/**
* Delete a row
*/
public static void delRecord(String tableName, String rowKey)
throws IOException {
HTable table = new HTable(conf, tableName);
List<Delete> list = new ArrayList<Delete>();
Delete del = new Delete(rowKey.getBytes());
list.add(del);
table.delete(list);
System.out.println("del recored " + rowKey + " ok.");
}
![Page 10: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/10.jpg)
/**
* Get a row
*/
public static void getOneRecord (String tableName, String rowKey) throws IOException{
HTable table = new HTable(conf, tableName);
Get get = new Get(rowKey.getBytes());
Result rs = table.get(get);
for(KeyValue kv : rs.raw()){
System.out.print(new String(kv.getRow()) + " " );
System.out.print(new String(kv.getFamily()) + ":" );
System.out.print(new String(kv.getQualifier()) + " " );
System.out.print(kv.getTimestamp() + " " );
System.out.println(new String(kv.getValue()));
}
}
![Page 11: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/11.jpg)
/**
* Scan (or list) a table
*/
public static void getAllRecord (String tableName) {
try{
HTable table = new HTable(conf, tableName);
Scan s = new Scan();
ResultScanner ss = table.getScanner(s);
for(Result r:ss){
for(KeyValue kv : r.raw()){
System.out.print(new String(kv.getRow()) + " "); System.out.print(new String(kv.getFamily()) + ":"); System.out.print(new String(kv.getQualifier()) + " ");
System.out.print(kv.getTimestamp() + " "); System.out.println(new String(kv.getValue())); } }
} catch (IOException e){
e.printStackTrace();
}
}
![Page 12: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/12.jpg)
public static void main(String[] agrs) {
try {
String tablename = "scores";
String[] familys = { "grade", "course" };
HBaseTest.creatTable(tablename, familys);
// add record zkb
HBaseTest.addRecord(tablename, "zkb", "grade", "", "5");
HBaseTest.addRecord(tablename, "zkb", "course", "", "90");
HBaseTest.addRecord(tablename, "zkb", "course", "math", "97");
HBaseTest.addRecord(tablename, "zkb", "course", "art", "87");
// add record baoniu
HBaseTest.addRecord(tablename, "baoniu", "grade", "", "4");
HBaseTest.addRecord(tablename, "baoniu", "course", "math", "89");
![Page 13: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/13.jpg)
System.out.println("===========get one record========");
HBaseTest.getOneRecord(tablename, "zkb");
System.out.println("===========show all record========");
HBaseTest.getAllRecord(tablename);
System.out.println("===========del one record========");
HBaseTest.delRecord(tablename, "baoniu");
HBaseTest.getAllRecord(tablename);
System.out.println("===========show all record========");
HBaseTest.getAllRecord(tablename);
} catch (Exception e) {
e.printStackTrace();
}
}}
![Page 14: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/14.jpg)
Sqoop (“SQL-to-Hadoop”) is a straightforward command-line tool with the following capabilities:
Imports individual tables or entire databases to files in HDFS
Generates Java classes to allow you to interact with your imported data
Provides the ability to import from SQL databases straight into your Hive data warehouse
![Page 15: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/15.jpg)
sqoop --connect jdbc:mysql://db.example.com/website --table USERS \ --local --hive-import
This would connect to the MySQL database on this server and import the USERS table into HDFS. The --local option instructs Sqoop to take advantage of a local MySQL connection which performs very well. The --hive-import option means that after reading the data into HDFS, Sqoop will connect to the Hive metastore, create a table named USERS with the same columns and types (translated into their closest analogues in Hive), and load the data into the Hive warehouse directory on HDFS.
![Page 16: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/16.jpg)
Suppose you wanted to work with this data in MapReduce and weren’t concerned with Hive. When storing this table in HDFS, you might want to take advantage of compression, so you’d like to be able to store the data in Sequence Files.
sqoop --connect jdbc:mysql://db.example.com/website --table USERS \ --as-sequencefile
Sqoop includes some other commands which allow you to inspect the database you are working with. For example, you can list the available database schemas (with the sqoop-list-databases tool) and tables within a schema (with the sqoop-list-tables tool). Sqoop also includes a primitive SQL execution shell (the sqoop-eval tool)
![Page 17: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/17.jpg)
sqoop help
usage: sqoop COMMAND [ARGS]
Available commands:
codegen Generate code to interact with database records
create-hive-table Import a table definition into Hive
eval Evaluate a SQL statement and display the results
export Export an HDFS directory to a database table
help List available commands
import Import a table from a database to HDFS
import-all-tables Import tables from a database to HDFS
list-databases List available databases on a server
list-tables List available tables in a database
version Display version information
See 'sqoop help COMMAND' for information on a specific command.
![Page 18: H base development](https://reader036.vdocuments.site/reader036/viewer/2022070316/555c3e74d8b42a0b038b4c85/html5/thumbnails/18.jpg)
sqoop help import
usage: sqoop import [GENERIC-ARGS] [TOOL-ARGS]
Common arguments:
--connect <jdbc-uri> Specify JDBC connect string
--connect-manager <jdbc-uri> Specify connection manager class to use
--driver <class-name> Manually specify JDBC driver class to use
--hadoop-home <dir> Override $HADOOP_HOME
--help Print usage instructions
-P Read password from console
--password <password> Set authentication password
--username <username> Set authentication username
--verbose Print more information while working