Sqoop Commands

This document walks through common ways to move data between MySQL, HDFS, Hive, and HBase using Sqoop.

The sqoop import command can bring a single table, the result of a free-form query, or all tables from MySQL into HDFS.

Adding the --hive-import flag to sqoop import loads the data into Hive managed tables instead of plain HDFS files. The final sections cover importing into HBase and exporting from HDFS back into MySQL.

In MySQL
--------
create database college;
use college;

CREATE TABLE student_master(
  student_id INT NOT NULL AUTO_INCREMENT,
  name VARCHAR(40) NOT NULL,
  address VARCHAR(40) NOT NULL,
  PRIMARY KEY (student_id));

CREATE TABLE fy(
  fy_id INT NOT NULL AUTO_INCREMENT,
  student_id INT NOT NULL,
  result DOUBLE NOT NULL,
  PRIMARY KEY (fy_id));

show tables;

describe student_master;

INSERT INTO student_master (name, address) VALUES ("Sanjay", "Bangalore");
INSERT INTO student_master (name, address) VALUES ("Rajiv", "Delhi");
INSERT INTO student_master (name, address) VALUES ("Rajesh", "Chennai");
INSERT INTO student_master (name, address) VALUES ("Sandeep", "Delhi");

INSERT INTO fy
(student_id, result)
VALUES
(1, 81.90);

INSERT INTO fy
(student_id, result)
VALUES
(2, 78.90);
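
A quick sanity check of the sample data before running Sqoop (optional; note that only students 1 and 2 have fy rows, which matters for the join examples later):

SELECT a.student_id, a.name, b.result
FROM student_master a JOIN fy b ON a.student_id = b.student_id;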

LIST DATABASES
--------------
[hduser@ubuntu ~]$ sqoop list-databases --connect jdbc:mysql://127.0.0.1 --username root --password '';
LIST TABLES in a database
-------------------------
[hduser@ubuntu ~]$ sqoop list-tables --connect jdbc:mysql://localhost/college --username root --password '';
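
Passing an empty --password on the command line is fine for a sandbox, but Sqoop also accepts -P to prompt for the password interactively, which keeps it out of the shell history:

[hduser@ubuntu ~]$ sqoop list-tables --connect jdbc:mysql://localhost/college --username root -P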

Import one table (with key) from MySQL into HDFS
------------------------------------------------
[hduser@ubuntu ~]$ sqoop import --connect jdbc:mysql://localhost/college --username root --password '' --table student_master --target-dir /iiht/student_master;
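
To confirm the import, list the target directory and view the files Sqoop wrote (one part-m-* file per mapper, comma-delimited text by default):

[hduser@ubuntu ~]$ hadoop fs -ls /iiht/student_master
[hduser@ubuntu ~]$ hadoop fs -cat /iiht/student_master/part-m-*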

Import one table (without key) from MySQL into HDFS
---------------------------------------------------
A table without a primary key gives Sqoop no column to split the work on, so either run a single mapper (-m 1) or name a split column explicitly (see the sketch below).
[hduser@ubuntu ~]$ sqoop import --connect jdbc:mysql://localhost/college --username root --password '' --table topten --target-dir /iiht/topten -m 1;
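
Alternatively, --split-by lets Sqoop parallelize the import on a chosen column even without a primary key. A sketch (the column name id is hypothetical, since topten's schema is not shown here):

[hduser@ubuntu ~]$ sqoop import --connect jdbc:mysql://localhost/college --username root --password '' --table topten --target-dir /iiht/topten_split --split-by id -m 4;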

WITH WHERE CLAUSE
----------------------------------------------------
sqoop import --connect jdbc:mysql://localhost/college --username root --password '' --table student_master --where 'student_id=1 or student_id=3' --target-dir /iiht/query -m 1;

WITH QUERY
----------------------------------------------------
sqoop import --connect jdbc:mysql://localhost/college --username root --password '' --query 'select * from student_master where $CONDITIONS and student_id=2' --target-dir /iiht/query1 -m 1;
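
$CONDITIONS is a literal token that Sqoop replaces with each mapper's split predicate (with -m 1 it is effectively 1=1); the single quotes keep the shell from expanding it. When --query is used with more than one mapper, Sqoop requires --split-by. A sketch:

sqoop import --connect jdbc:mysql://localhost/college --username root --password '' --query 'select * from student_master where $CONDITIONS' --split-by student_id --target-dir /iiht/query_parallel -m 2;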

WITH INNER JOIN in the form of a query
----------------------------------------------------
sqoop import --connect jdbc:mysql://localhost/college --username root --password '' --query 'select a.student_id, a.name, a.address, b.result from student_master a, fy b where $CONDITIONS and a.student_id=b.student_id' --target-dir /iiht/query4 -m 1;

WITH LEFT OUTER JOIN in the form of a query
----------------------------------------------------
With the sample data, this also returns Rajesh and Sandeep with a NULL result, since they have no fy rows.
sqoop import --connect jdbc:mysql://localhost/college --username root --password '' --query 'select a.student_id, a.name, a.address, b.result from student_master a left outer join fy b on a.student_id = b.student_id where $CONDITIONS' --target-dir /iiht/query5 -m 1;

WITH RIGHT OUTER JOIN in the form of a query
----------------------------------------------------
The right outer join keeps every fy row instead, so with the sample data it returns only students 1 and 2.
sqoop import --connect jdbc:mysql://localhost/college --username root --password '' --query 'select a.student_id, a.name, a.address, b.result from student_master a right outer join fy b on a.student_id = b.student_id where $CONDITIONS' --target-dir /niit/query6 -m 1;

Import all tables from MySQL into HDFS
----------------------------------------------------
[hduser@ubuntu ~]$ sqoop import-all-tables --connect jdbc:mysql://localhost/college --username root --password '' --warehouse-dir /iiht/all_tables;
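
Unlike --target-dir, --warehouse-dir is a parent directory: Sqoop creates one subdirectory per table under it. Note that import-all-tables expects every table to have a primary key (student_master and fy both do); otherwise add -m 1. Verify with:

[hduser@ubuntu ~]$ hadoop fs -ls /iiht/all_tables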

Import into Hive managed tables
----------------------------------------------------
First create the target databases in Hive (the commands below load tables into the iiht and niit databases):
hive> create database iiht;
hive> create database niit;
hive> quit;

sqoop import --connect jdbc:mysql://localhost/college --username root --password '' --table student_master --hive-import --hive-table iiht.student_profile -m 1;

sqoop import --connect jdbc:mysql://localhost/college --username root --password '' --table fy --hive-import --hive-table niit.fyresults -m 1;

sqoop import --connect jdbc:mysql://localhost/college --username root --password '' --table student_master --columns "student_id,name" --hive-import --hive-table niit.student_name -m 1;

sqoop import --connect jdbc:mysql://localhost/college --username root --password '' --query 'select a.student_id, a.name, a.address, b.result from student_master a, fy b where $CONDITIONS and a.student_id=b.student_id' --hive-import --hive-table iiht.student_join --target-dir /iiht/query20 -m 1;
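
With --query, the --target-dir is required as a staging area: Sqoop first writes the data there, then loads it into the Hive table. The results can be checked from the Hive shell:

hive> show tables in iiht;
hive> select * from iiht.student_join;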

Importing Data into HBase using Sqoop
----------------------------------------------------
$ hbase shell
hbase> create 'college', 'student_profile', 'fyresults'
hbase> describe 'college'
hbase> exit

sqoop import --connect jdbc:mysql://localhost/college --username root --password '' --table student_master --columns "student_id,name,address" --hbase-table college --column-family student_profile --hbase-row-key student_id -m 1;

sqoop import --connect jdbc:mysql://localhost/college --username root --password '' --table fy --columns "student_id,result" --hbase-table college --column-family fyresults --hbase-row-key student_id -m 1;

$ hbase shell
hbase> scan 'college'
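
Since student_id was used as the HBase row key, a single student can also be fetched directly:

hbase> get 'college', '1'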

sqoop export command
----------------------------------------------------
nano /home/hduser/employee.txt
1201,satish,delhi
1202,krishna,mumbai
1203,amith,pune
1204,javed,chennai
1205,prudvi,bangalore

hadoop fs -put employee.txt /user/training


In MySQL
create database employee;
use employee;
CREATE TABLE employee_master(
employee_id INT NOT NULL AUTO_INCREMENT,
name VARCHAR(40) NOT NULL,
address VARCHAR(40) NOT NULL,
PRIMARY KEY ( employee_id ));

sqoop export --connect jdbc:mysql://localhost/employee --username root --password '' --table employee_master --update-mode allowinsert --update-key employee_id --export-dir /user/training/employee.txt --input-fields-terminated-by ',';
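
With --update-key plus --update-mode allowinsert, rerunning the export behaves as an upsert: rows whose employee_id already exists are updated, and new ids are inserted. Verify on the MySQL side:

mysql> use employee;
mysql> select * from employee_master;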
