Skip to main content

sqoop

----------for database connect--------
# Log in to the local MySQL instance as root. Note: when the password is
# passed inline there is no space between -p and the password itself.
mysql -u root -pcloudera
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
# Basic table import into HDFS (default parallelism).
# FIX: the original URL read "jdbc://localhost/retail_db" — the MySQL JDBC
# driver requires the "jdbc:mysql://" scheme, as every other command here uses.
>>sqoop import --connect jdbc:mysql://localhost/retail_db --username root --password cloudera --table customers --target-dir /data/sqoop/test01
# Same import with 10 parallel mappers (-m 10).
>>sqoop import --connect jdbc:mysql://localhost/retail_db --username root --password cloudera --table customers --target-dir /data/sqoop/test02 -m 10
# Explicit split column and a '|' field delimiter in the output files.
>>sqoop import --connect jdbc:mysql://localhost/retail_db --username root --password cloudera --table customers --target-dir /data/sqoop/test03 --split-by customer_id --fields-terminated-by '|' -m 10
# Enumerate databases / tables visible through the connection.
>>sqoop list-databases --connect jdbc:mysql://localhost/retail_db --username root --password cloudera
>>sqoop list-tables --connect jdbc:mysql://localhost/retail_db --username root --password cloudera
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
# Import customers straight into a new Hive table veer.customers2.
# NOTE(review): --create-hive-table makes the job fail if the target Hive
# table already exists — drop the flag to append to an existing table.
sqoop import --connect jdbc:mysql://localhost/retail_db --username root --password cloudera --table customers  --split-by customer_id --fields-terminated-by '|' -m 10 --hive-import --create-hive-table --hive-table veer.customers2

---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Topics below: sqoop saved jobs, export, and dynamic partitioning.



-- Staging table mirroring retail_db.customers.
-- FIX: the original declared bare VARCHAR with no length, which is a syntax
-- error in MySQL (and in Hive) — explicit lengths added. Sizes are assumed
-- to match the source customers table; verify with DESCRIBE customers.
create table customer5 (
    customer_id       int,
    customer_fname    varchar(45),
    customer_lname    varchar(45),
    customer_email    varchar(45),
    customer_password varchar(45),
    customer_street   varchar(255),
    customer_city     varchar(45),
    customer_state    varchar(45),
    customer_zipcode  varchar(45)
);


-- Hive target table for incremental imports; STRING is the idiomatic Hive
-- text type, so no lengths are needed.
hive>> CREATE TABLE customer_inc (
           customer_id       INT,
           customer_fname    STRING,
           customer_lname    STRING,
           customer_email    STRING,
           customer_password STRING,
           customer_street   STRING,
           customer_city     STRING,
           customer_state    STRING,
           customer_zipcode  STRING
       );

# Parallel import (10 mappers, ',' delimiter) routed through HCatalog into
# veer.customer4 instead of writing raw files to a --target-dir.
sqoop import --connect jdbc:mysql://localhost/retail_db --username root --password cloudera --table customers --split-by customer_id --fields-terminated-by ',' -m 10 --hcatalog-database veer --hcatalog-table customer4;


# Define a saved sqoop job that imports customers into Hive.
# FIXES vs. original:
#  - "sqoop job --create" must be followed by a JOB NAME, and the tool name
#    ("import") must be separated from the job options by a bare "--" token;
#    the original used "import" itself as the job name with no separator.
#  - trailing ";" removed — this is a shell command, not SQL.
# NOTE(review): --hive-partition-key is given without --hive-partition-value;
# the static partition options normally require both — confirm before running.
>>>sqoop job --create cust_hive_import -- import --connect jdbc:mysql://localhost/retail_db --username root --password cloudera --table customers --hive-import --hive-table customer4 --hive-partition-key customer_zipcode
>>>
# Free-form-query import into a single static Hive partition. The literal
# \$CONDITIONS token is mandatory with --query so sqoop can inject per-mapper
# split predicates; --split-by names the split column.
# NOTE(review): the query filters customer_zipcode='00725' but the partition
# value is 'PR00725' — these look inconsistent; confirm the intended value.
sqoop import --connect jdbc:mysql://localhost/retail_db \
--username root --password cloudera \
--hive-import \
--query "select customer_id ,customer_fname,customer_lname,customer_email,customer_password,customer_street,customer_city,customer_state from retail_db.customers where customer_zipcode='00725' and \$CONDITIONS" \
--hive-table customer4 \
--hive-partition-key customer_zipcode \
--hive-partition-value 'PR00725' \
--target-dir /user/cloudera/customer_temp \
--split-by customer_id

# Full-table free-form-query import into a partitioned Hive table.
# NOTE(review): --hive-partition-key appears here WITHOUT a matching
# --hive-partition-value; sqoop's hive-partition flags are for static
# partitions and usually require both — for dynamic partitioning the
# --hcatalog-* options (as in the earlier command) are the common route.
# Verify this command actually runs before relying on it.
sqoop import --connect jdbc:mysql://localhost/retail_db \
--username root --password cloudera \
--hive-import \
--query "select * from retail_db.customers where \$CONDITIONS" \
--hive-table customer5 \
--hive-partition-key customer_zipcode \
--target-dir /user/cloudera/customer_temp \
--split-by customer_id


# Fragment (not a complete command): alternative to the free-form --query
# above — select the same rows with --table plus a --where filter instead.
--table customers --where "customer_zipcode='PR00725'"




_____________________________________________________________________________________________________________________________________________________________________
-- MySQL: clone the structure of customers with zero rows — WHERE 1=2 never
-- matches, so only the column definitions are copied into ct1.
-- NOTE(review): this creates "ct1" but the export command below targets a
-- table named "temp" — presumably separate examples; verify the export's
-- target table actually exists.
create table ct1 select * from customers where 1=2;
# Export the files under the Hive warehouse directory back into the MySQL
# table "temp". sqoop export requires the target table to already exist
# with a column layout matching the exported files.
sqoop export --connect jdbc:mysql://localhost/retail_db \
--username root --password cloudera \
--table temp \
--export-dir /user/hive/warehouse/temp
______________________________________________________________________________________________
# Saved job "inc_upt" for incremental (append) imports: each run fetches only
# rows whose customer_id exceeds the stored last-value, and the saved job
# updates that value automatically after a successful run.
# FIXES vs. original:
#  - "-create" -> "--create" (long option needs two dashes);
#  - bare "--" separator inserted between the job name and the "import" tool;
#  - "last-value" -> "--last-value" (the flag was missing its dashes).
sqoop job --create inc_upt -- import --connect jdbc:mysql://localhost/retail_db \
--username root --password cloudera \
--table customers \
--incremental append \
--check-column customer_id --last-value 12435 \
--target-dir /data/sqoop/test02


# Generic template for a saved incremental-import job (substitute jobname,
# DBname, credentials, tablename, colname and the starting last-value).
# FIXES vs. original: the blog paste had turned every "--" into an en-dash
# ("–"), left the literal "–<space>import" placeholder instead of the real
# bare "--" separator, and dropped the dashes on "--last-value".
sqoop job --create jobname -- import --connect jdbc:mysql://localhost/DBname --username username --password password --table tablename --incremental append --check-column colname --last-value 101

Comments

Popular posts from this blog

How to learn Hadoop

First of all, I want to tell you. here all content you will get  practical only. for the theoretical part, you can follow edureka videos and Durgasoft videos. once you complete some videos related to map-reduce and Hadoop ecosystem..after that you can follow me... you need to go step by step:- 1. What is big data 2. What is the need of Hadoop 3. How Hadoop works. 4. Tools in Hadoop ecosystem. If you are a beginner then first you need some configuration in your system which is: 1: RAM:-             Minimum RAM size should be 8gb otherwise you will get frustrated because of a slow system.             if you go with 16gb RAM then it will be great. My opinion is to go with 16gb RAM. 2: Download VMWARE. https://my.vmware.com/en/web/vmware/info/slug/desktop_end_user_computing/vmware_workstation_pro/14_0 3. Download Cloudera -> https://www.cloudera.com/downloads/cdh/5-14-0.html . inst...

important

create database veer; use veer; create table item_details(item_id int,item_name varchar(30),item_price int,item_color varchar(10)) row format delimited fields terminated by',' lines terminated by '\n' stored as textfile; hadoop fs -put /home/cloudera/sk/data/item.txt /user/hive/warehouse/veer.db/item select count(*) from item_details; select * from item_details order by item_id desc; select item_color,count(*) from item_details group by item_color; ---------------------------31-01-2018---------------------------------- create table flight_nt(s_id int,s_name varchar(30),code varchar(20),state varchar(40)) row format delimited fields terminated by',' lines terminated by '\n' stored as textfile; create table flightdata_part (s_id int, s_name varchar(30), code varchar(20) ) PARTITIONED BY (state VARCHAR(64)) row format delimited fields terminated by ',' lines terminated by '\n'  stored as textfile; LOAD DATA LOCAL INPATH '/local/home/srv...