1. MySQL data source:
create database userdb;
use userdb;
DROP TABLE IF EXISTS `emp`;
CREATE TABLE `emp` (
`id` int(11) NOT NULL,
`name` varchar(100) DEFAULT NULL,
`deg` varchar(100) DEFAULT NULL,
`salary` int(11) DEFAULT NULL,
`dept` varchar(10) DEFAULT NULL,
PRIMARY KEY (`id`)
);
INSERT INTO `emp` VALUES ('1201', 'gopal', 'manager', '50000', 'TP');
INSERT INTO `emp` VALUES ('1202', 'manisha','Proof reader', '50000', 'TP');
INSERT INTO `emp` VALUES ('1203', 'khalil','php dev', '30000', 'AC');
INSERT INTO `emp` VALUES ('1204', 'prasanth','php dev', '30000', 'AC');
INSERT INTO `emp` VALUES ('1205', 'kranthi','admin', '20000', 'TP');
DROP TABLE IF EXISTS `emp_add`;
CREATE TABLE `emp_add` (
`id` int(11) NOT NULL,
`hno` varchar(100) DEFAULT NULL,
`street` varchar(100) DEFAULT NULL,
`city` varchar(100) DEFAULT NULL,
PRIMARY KEY (`id`)
);
INSERT INTO `emp_add` VALUES ('1201', '288A', 'vgiri', 'jublee');
INSERT INTO `emp_add` VALUES ('1202', '108I','aoc', 'sec-bad');
INSERT INTO `emp_add` VALUES ('1203', '144Z','pgutta', 'hyd');
INSERT INTO `emp_add` VALUES ('1204', '78B','old city', 'sec-bad');
INSERT INTO `emp_add` VALUES ('1205', '720X','hitec', 'sec-bad');
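Optional sanity check (a minimal sketch, assuming the mysql client is on the PATH and the root password zjw used in step 5):
mysql -u root -pzjw -e "SELECT COUNT(*) FROM userdb.emp; SELECT COUNT(*) FROM userdb.emp_add;"
Both counts should come back as 5.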
2. Configure environment variables
vi /etc/profile
export HADOOP_CLASSPATH=/home/zjw/hadoop-2.6.0/lib
export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$HIVE_HOME/lib/*
source /etc/profile
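To confirm the new classpath is visible in the current shell:
echo "$HADOOP_CLASSPATH"
It should print /home/zjw/hadoop-2.6.0/lib followed by the Hive lib entry.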
3. Create the database in Hive
Run the hive CLI and execute:
create database itcast;
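To verify the database was created (assuming the hive CLI is on the PATH):
hive -e "show databases;"
itcast should appear in the output.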
4. Copy hive-site.xml from Hive's conf directory into Sqoop's conf directory
cp /hive/apache-hive-1.2.2-bin/conf/hive-site.xml /sqoop/sqoop-1.4.7.bin__hadoop-2.6.0/conf/
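Before running the import, it helps to confirm that Sqoop can reach MySQL (a minimal connectivity check using the same host and credentials as step 5; the MySQL JDBC driver jar must already be in Sqoop's lib directory):
sqoop list-databases --connect jdbc:mysql://192.168.1.7:3306/ --username root --password zjw
userdb should show up in the list of databases.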
5. Run the import from MySQL into Hive
sqoop import --connect jdbc:mysql://192.168.1.7:3306/userdb --username root --password zjw --table emp_add --hive-table itcast.emp_add_sp --create-hive-table --hive-import --num-mappers 1
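Once the job completes, the imported data can be checked from Hive (assuming the hive CLI is available; emp_add_sp is the target table created by --create-hive-table):
hive -e "select * from itcast.emp_add_sp;"
The five rows inserted into emp_add in step 1 should be returned.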