This can be achieved easily with a Sqoop job.
1. Create a Sqoop job (note that there is a space between "--" and "import"). A sketch for preparing the password file it reads and for verifying the saved job follows the command.
sqoop job --create JobName6 \
-- import \
--connect jdbc:mysql://localhost:3306/retail_db \
--username username \
--password-file /user/sqoop/password \
--table departments \
--target-dir /user/hive/warehouse/test.db/departments \
--split-by department_id \
--check-column department_id \
--incremental append \
--last-value 0;
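The job above reads the database password from a file in HDFS instead of taking it on the command line. A minimal sketch of preparing that file and confirming the saved job (the password value is a placeholder for your real MySQL password):
# Store the MySQL password in HDFS; echo -n avoids a trailing newline
echo -n "MyDbPassword" | hdfs dfs -put - /user/sqoop/password
# Restrict permissions so only the owner can read it
hdfs dfs -chmod 400 /user/sqoop/password
# Confirm the job was saved and inspect its definition
sqoop job --list
sqoop job --show JobName6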
2. Run the Sqoop job:
sqoop job --exec JobName6;
Check the values in the target location in HDFS.
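For example, the imported files can be listed and read with the HDFS CLI (the path is the --target-dir used above):
hdfs dfs -ls /user/hive/warehouse/test.db/departments
hdfs dfs -cat /user/hive/warehouse/test.db/departments/part-*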
3. Insert some data into the source table (MySQL):
INSERT INTO departments VALUES (9,'New Data1'),(10,'New Data2');
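If you use the mysql command-line client, the statement can also be run non-interactively; the host, user, and database below simply mirror the --connect string and stand in for your own credentials:
mysql -h localhost -u username -p retail_db \
  -e "INSERT INTO departments VALUES (9,'New Data1'),(10,'New Data2');"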
4. Run the Sqoop job again:
sqoop job --exec JobName6;
Check the values in the location in HDFS again; only the two newly inserted rows should have been appended.
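As a quick sketch of verifying the incremental behaviour: re-read the target directory (new part files containing rows 9 and 10 should appear alongside the original ones) and inspect the saved job, whose stored last value should now have advanced past 0; the exact properties printed depend on your Sqoop version.
# Newly appended files/rows show up in addition to the original import
hdfs dfs -cat /user/hive/warehouse/test.db/departments/part-*
# The saved job records the new high-water mark for --check-column
sqoop job --show JobName6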
Similarly, for a Hive import:
sqoop job --create JobName1 \
-- import \
--connect jdbc:mysql://localhost:3306/retail_db \
--username username \
--password-file /user/sqoop/password \
--table departments \
--hive-import \
--hive-table department \
--split-by department_id \
--check-column department_id \
--incremental append \
--last-value 0;
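Running and verifying the Hive job follows the same pattern. A minimal sketch, assuming the hive CLI (or beeline) is available and the table was created in the default database:
sqoop job --exec JobName1
# Verify the rows arrived in the Hive table
hive -e "SELECT * FROM department;"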