#!/bin/bash
# Start/stop the Flume agent that collects the mall2 log stream
case $1 in
"start")
    nohup flume-ng agent --conf /opt/installs/flume1.9/conf --name a1 --conf-file /opt/installs/flume1.9/job/mall2.conf >/opt/installs/flume1.9/out2.txt 2>&1 &
;;
"stop")
    # Find the agent process by its config file name and kill it
    ps -ef | grep mall2.conf | grep -v grep | awk '{print $2}' | xargs kill
;;
*)
    echo "Please pass a valid argument: start or stop"
;;
esac
-->A simple Flume start/stop script<--
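For reference, assuming the script above is saved as an executable file named flume-mall2.sh (the file name is a placeholder, not from the original), it would be invoked like this:

flume-mall2.sh start   # launch the agent in the background, logging to out2.txt
flume-mall2.sh stop    # kill the agent found by its config file name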
#!/bin/bash
# Verify that a command was passed in
if (($# == 0))
then
    echo "Please pass in the command to execute!"
    exit
fi
echo "Command to execute: $@"
# Run the command on every node in the cluster (hadoop11 to hadoop13)
for ((i = 11; i <= 13; i++))
do
    echo -----------------------hadoop$i---------------------
    ssh hadoop$i "source /etc/profile;$@"
done
-->Cluster command distribution script<--
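A minimal usage sketch, assuming the script is saved as xcall.sh (the name is an assumption) and passwordless SSH to hadoop11-hadoop13 is already configured:

xcall.sh jps                          # check the Java processes on every node
xcall.sh "ls /opt/installs/flume1.9"  # any quoted command is run the same way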
Flume collection config: see the examples in the official Flume documentation.
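As a rough sketch only (the source type, file paths, and HDFS address are assumptions, not the author's actual mall2.conf), a TAILDIR-to-HDFS config for the agent named a1 used by the start script could look like this:

a1.sources = r1
a1.channels = c1
a1.sinks = k1
# Tail the application log files (paths are placeholders)
a1.sources.r1.type = TAILDIR
a1.sources.r1.positionFile = /opt/installs/flume1.9/position/mall2_position.json
a1.sources.r1.filegroups = f1
a1.sources.r1.filegroups.f1 = /opt/data/mall/.*\.log
# Buffer events in memory
a1.channels.c1.type = memory
a1.channels.c1.capacity = 1000
a1.channels.c1.transactionCapacity = 100
# Write events to HDFS, partitioned by day
a1.sinks.k1.type = hdfs
a1.sinks.k1.hdfs.path = hdfs://hadoop10:8020/mall/%Y-%m-%d
a1.sinks.k1.hdfs.useLocalTimeStamp = true
a1.sinks.k1.hdfs.fileType = DataStream
# Wire the components together
a1.sources.r1.channels = c1
a1.sinks.k1.channel = c1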
Sqoop import commands: RDBMS (MySQL) -> HDFS:
bin/sqoop import \
--driver com.mysql.jdbc.Driver \
--connect jdbc:mysql://hadoop10:3306/test1?characterEncoding=UTF-8 \
--username root \
--password 123456 \
--table t_user \
--num-mappers 4 \
--fields-terminated-by '\t' \
--target-dir /mysql/t_user \
--delete-target-dir
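Once the import finishes, the result can be checked directly on HDFS (the part-file name below is the usual default for a map task's output, shown only as an illustration):

hdfs dfs -ls /mysql/t_user
hdfs dfs -cat /mysql/t_user/part-m-00000 | head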
RDBMS (MySQL) -> Hive:
bin/sqoop import \
--driver com.mysql.jdbc.Driver \
--connect jdbc:mysql://hadoop10:3306/test1?characterEncoding=UTF-8 \
--username root \
--password 123456 \
--table t_user \
--num-mappers 1 \
--hive-import \
--fields-terminated-by "\t" \
--hive-overwrite \
--hive-database test_hive \
--hive-table hive_user
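To confirm the Hive import (assuming the test_hive database already exists, since Sqoop will not create it for you):

hive -e "select * from test_hive.hive_user limit 5;"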
vi a.txt
# Add the following content to the file
1 zhangsan true 20 2020-01-11
2 lisi false 25 2020-01-10
3 wangwu true 36 2020-01-17
4 zhaoliu false 50 1990-02-08
5 win7 true 20 1991-02-08
# Create a sqoop directory on HDFS (the name is arbitrary, but it must match the directory used later) and upload the file to it
hdfs dfs -mkdir /sqoop
hdfs dfs -put a.txt /sqoop
Optional import flags for NULL handling (the string written to HDFS for NULL columns; \N is Hive's default null representation):
--null-string '\N'
--null-non-string '\N'
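The file uploaded to /sqoop above is typically pushed back into MySQL with sqoop export. A minimal sketch under assumptions (the target table t_user2 and the tab delimiter are placeholders, not from the original; the table must already exist in MySQL with matching columns):

bin/sqoop export \
--driver com.mysql.jdbc.Driver \
--connect jdbc:mysql://hadoop10:3306/test1?characterEncoding=UTF-8 \
--username root \
--password 123456 \
--table t_user2 \
--num-mappers 1 \
--export-dir /sqoop \
--input-fields-terminated-by '\t' \
--input-null-string '\N' \
--input-null-non-string '\N'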