Sqoop command line (CLI)
sqoop help
usage: sqoop COMMAND [ARGS]
Available commands:
codegen Generate code to interact with database records
create-hive-table Import a table definition into Hive
eval Evaluate a SQL statement and display the results
export Export an HDFS directory to a database table
help List available commands
import Import a table from a database to HDFS
import-all-tables Import tables from a database to HDFS
import-mainframe Import datasets from a mainframe server to HDFS
job Work with saved jobs
list-databases List available databases on a server
list-tables List available tables in a database
merge Merge results of incremental imports
metastore Run a standalone Sqoop metastore
version Display version information
sqoop help import
sqoop import --help
sqoop version
sqoop-version
sqoop import --connect jdbc:mysql://database.example.com/employees --username satya --password sat345
$ sqoop import --connect jdbc:mysql://localhost:3306/hadoop --driver com.mysql.jdbc.Driver --username root --password r00t --table bse
$ sqoop import --connect jdbc:mysql://localhost:3306/hadoop --driver com.mysql.jdbc.Driver --username root -P --table bsmcg --hbase-row-key date --hbase-create-table --column-family firstdecade --hbase-table bsmcg
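Passing --password on the command line leaves the credential in the shell history; -P prompts interactively, and --password-file reads the secret from a protected file instead. A minimal sketch, assuming the password has been stored at /user/root/.password:
sqoop import --connect jdbc:mysql://localhost:3306/hadoop --driver com.mysql.jdbc.Driver --username root --password-file /user/root/.password --table bse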
sqoop import --connect <connect-str> --query 'SELECT a.*, b.* FROM a JOIN b ON (a.id = b.id) WHERE $CONDITIONS' --split-by a.id --target-dir /user/foo/joinresults
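The degree of parallelism for any import can be set explicitly with -m (--num-mappers) alongside --split-by. A minimal sketch, assuming an EMPLOYEES table with an integer primary key id:
sqoop import --connect jdbc:mysql://db.foo.com/corp --table EMPLOYEES --split-by id -m 8 --target-dir /user/foo/employees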
sqoop import --connect <connect-str> --table SomeTable --package-name com.foocorp
sqoop import --connect jdbc:mysql://db.foo.com/corp --table EMPLOYEES --username SomeUser -P
sqoop import --connect jdbc:mysql://db.foo.com/corp --table EMPLOYEES --fields-terminated-by '\t' --lines-terminated-by '\n' --optionally-enclosed-by '\"'
sqoop import --connect jdbc:mysql://db.foo.com/corp --table EMPLOYEES --hive-import
sqoop import-all-tables --connect jdbc:mysql://db.foo.com/corp
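For tables that only receive new rows, an incremental append import continues from the point where the last run stopped. A minimal sketch, assuming an integer id check column whose highest previously imported value was 100:
sqoop import --connect jdbc:mysql://db.foo.com/corp --table EMPLOYEES --incremental append --check-column id --last-value 100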
$ sqoop export --connect jdbc:mysql://localhost:3306/hadoop --driver com.mysql.jdbc.Driver --username root --password root --table bsmcg --export-dir /user/satya/bsmcg
$ sqoop export --connect jdbc:mysql://localhost:3306/hadoop --driver com.mysql.jdbc.Driver --username root --password root --table bse --export-dir /user/hive/warehouse/bse
sqoop export --connect jdbc:mysql://db.example.com/foo --table bar --export-dir /results/bar_data
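Exports insert new rows by default; with --update-key (and optionally --update-mode allowinsert) existing rows are updated instead. A minimal sketch, assuming the bar table has an id primary key:
sqoop export --connect jdbc:mysql://db.example.com/foo --table bar --export-dir /results/bar_data --update-key id --update-mode allowinsert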
$ sqoop create-hive-table --connect jdbc:mysql://localhost:3306/hadoop --driver com.mysql.jdbc.Driver --username root -P --table bse
sqoop create-hive-table --connect jdbc:mysql://db.example.com/corp --table employees --hive-table emp
sqoop list-databases --connect jdbc:mysql://database.example.com/
sqoop list-databases --connect jdbc:mysql://sqoop......rds.amazonaws.com:3306 --username admin -P
sqoop list-tables --connect jdbc:postgresql://database.example.com/corp
sqoop list-tables --connect jdbc:mysql://sqoop......rds.amazonaws.com:3306/ssDB --username admin -P
sqoop job --list
sqoop job --show myjob
sqoop job --create myjob -- import --connect jdbc:mysql://example.com/db --table mytable
sqoop job --exec myjob -- --username someuser -P
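Saved jobs are commonly paired with incremental imports, since Sqoop records the updated --last-value after each run. A minimal sketch; the job name and table are illustrative:
sqoop job --create dailyimport -- import --connect jdbc:mysql://example.com/db --table mytable --incremental append --check-column id --last-value 0
sqoop job --exec dailyimport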
sqoop merge --new-data newer --onto older --target-dir merged --jar-file datatypes.jar --class-name Foo --merge-key id
sqoop codegen --connect jdbc:mysql://db.example.com/corp --table employees
sqoop eval --connect jdbc:mysql://db.example.com/corp --query "SELECT * FROM employees LIMIT 10"
sqoop eval --connect jdbc:mysql://db.example.com/corp -e "INSERT INTO foo VALUES(42, 'bar')"
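Frequently reused arguments can be kept in an options file and pulled in with --options-file. A minimal sketch, assuming /home/satya/import.txt lists one argument per line (for example --connect, --username and -P):
sqoop import --options-file /home/satya/import.txt --table employees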