# SQL Server JDBC connection string.
# (fixed: the original began with a stray backslash — "\jdbc:..." — which is
# not a valid JDBC URL; every JDBC URL must start with "jdbc:")
# NOTE(review): the Microsoft JDBC driver property is normally "user", not
# "username", and sqoop usually takes credentials via --username/--password —
# confirm against the full script before changing.
# SECURITY: credentials are hardcoded here; prefer reading them from a
# protected file or the environment.
sqlConnect="jdbc:sqlserver://172.16.177.45:1433;\
username=sa;\
password=123456;\
database=Test;"
# Column handed to sqoop --split-by to parallelize the import.
checkColumn="RecoID"
# SQL Server column ([记录时间] = record time) used to filter imported rows.
condiColumn="[记录时间]"
# Import only rows whose ${condiColumn} is >= this date.
endTime="2016-08-15"
# Source SQL Server tables to import in this run.
# Entries prefixed with '#' are disabled; before re-enabling one, make sure
# its matching query string (RSI/RSW/DTU/WRSW — see the case statement in the
# loop below) is defined, or the generated sqoop query will be empty.
dstTabarray=(
"000B_7F14_IDUStateInfo_RSimulateData"
#"000B_7F14_IDUStateInfo_RSwitchData"
#"000B_7F14_ODUStateInfo_DTUWRSimulateData"
#"000B_7F14_ODUStateInfo_WRSimulateData"
#"000B_7F14_ODUStateInfo_WRSwitchData"
)
# Hive database to load into.
hiveDbname="BIZ1"
# Destination (permanent) Hive table.
hivetablename="000B_data"
# Staging (temporary) Hive table used for the external-table load step.
tempTabname="000B_dataTemp"
# HDFS directory backing the staging table (sqoop's --target-dir).
tempTabPath="/user/hive/biz1/extend/wp04/${tempTabname}"
# HDFS directory backing the destination table.
hivetablePath="/user/hive/biz1/extend/wp04/${hivetablename}"
# Column definitions for the Hive staging table, one "name type" pair per
# entry, in the order the sqoop query emits them.
hivetableColsList=(
  "RowKey string"
  "RecoTime timestamp"
  "ProjID bigint"
  "DevID bigint"
  "DevAddr int"
  "FrameNO int"
  "ReceiveTime timestamp"
  "ModifyFlag string"
  "tablename string"
  "RecoID bigint"
  "ProtocolVer_DB string"
  "ModelID_DB string"
  "RemoteOnOffFunc smallint"
  "ForbIDComp1 smallint"
  "ForbIDComp2 smallint"
  "OnOffModeSet smallint"
  "RunModeSet smallint"
  "Chill_LWT_Set float"
  "Comp2FreqProAlarm smallint"
)
# Join into the single comma-separated DDL column list the hive -e step expects.
hivetableCols=$(IFS=,; printf '%s' "${hivetableColsList[*]}")
# Select list for table 000B_7F14_ODUStateInfo_WRSimulateData.
# RowKey = reverse(10-digit zero-padded device ID) + HHmmss of [记录时间]
#          + literal '7F14' + 6-digit zero-padded Reco_ID.
# NOTE(review): this list stops at ModelID_DB and then jumps straight to the
# final Comp2FreqProAlarm placeholder, while hivetableCols declares 19
# columns — this chunk of the script appears truncated; verify against the
# original before running.
WRSI="select reverse(right('0000000000'+ltrim((select top 1 c.[ProjDev_ID] from Proj_Dev c where c.Proj_ID=b.Proj_ID and c.[机组条码]='FFFFFFFFFFFFFFF')),10))+replace(CONVERT(varchar(12),a.[记录时间],108),':','')+'7F14'+right('000000'+ltrim([Reco_ID]),6)as RowKey\
,a.[记录时间] as RecoTime\
,b.Proj_ID as ProjID\
,(select top 1 c.[ProjDev_ID] from Proj_Dev c where c.Proj_ID=b.Proj_ID and c.[机组条码]='FFFFFFFFFFFFFFF') as DevID\
,'224' as DevAddr\
,'' as FrameNO\
,'' as ReceiveTime\
,'' as ModifyFlag\
,'7F14' as tablename\
,a.[Reco_ID] as RecoID\
,a.[协议版本] as ProtocolVer_DB\
,a.[机型ID] as ModelID_DB\
,'' as Comp2FreqProAlarm"
# (fixed: three orphaned fragment lines followed the closing quote above —
# leftovers of the truncated RSI/RSW/DTU/WRSW query definitions. Each began
# with a bare ',' and was not valid shell, so they were removed. The RSI,
# RSW, DTU and WRSW variables referenced by the loop below must be restored
# from the original script before enabling the corresponding tables.)
# Main loop: for each enabled source table, sqoop the rows into an HDFS
# staging directory, then load them into the partitioned Hive table.
for dstTabname in "${dstTabarray[@]}"; do
  # JOIN clause shared by every per-table query: attach project/device info.
  whereStr=" from [dbo].[${dstTabname}] a left join [dbo].[Proj_Dev] b on a.ProjDev_ID=b.ProjDev_ID"
  sql=""
  # Pick the select list matching this source table.
  # NOTE(review): RSI, RSW, DTU and WRSW are not defined in this chunk of the
  # script (only WRSI is visible) — confirm they exist in the full file.
  case "${dstTabname}" in
    "000B_7F14_IDUStateInfo_RSimulateData")     sql=${RSI}${whereStr} ;;
    "000B_7F14_IDUStateInfo_RSwitchData")       sql=${RSW}${whereStr} ;;
    "000B_7F14_ODUStateInfo_DTUWRSimulateData") sql=${DTU}${whereStr} ;;
    "000B_7F14_ODUStateInfo_WRSimulateData")    sql=${WRSI}${whereStr} ;;
    "000B_7F14_ODUStateInfo_WRSwitchData")      sql=${WRSW}${whereStr} ;;
  esac

  # Import rows with ${condiColumn} >= ${endTime} into the staging directory.
  # (fixed: the option was mangled as --fIElds-terminated-by; sqoop options
  # are case-sensitive and would reject it)
  sqoop import -D mapred.job.queue.name=production --connect "${sqlConnect}" \
    --query "select t.* from (${sql} where ${condiColumn}>='${endTime}') t WHERE \$CONDITIONS" \
    --split-by "${checkColumn}" \
    --fields-terminated-by '\t' \
    --lines-terminated-by '\n' \
    --delete-target-dir \
    --target-dir "${tempTabPath}" \
    -m 20

  # Expose the staged files through a temporary external table, then insert
  # them into the destination table with dynamic partitions on year/month/day
  # derived from RecoTime.
  hive -e "\
use ${hiveDbname}; \
drop table if exists ${tempTabname}; \
create external table ${tempTabname} (${hivetableCols}) \
row format delimited fields terminated by '\t' \
location \"${tempTabPath}\"; \
use ${hiveDbname};\
set mapreduce.job.queuename=production; \
set hive.execution.engine=mr;\
set hive.exec.dynamic.partition=true; \
set hive.exec.dynamic.partition.mode=nonstrict; \
set hive.exec.compress.output=true;\
set mapred.output.compression.codec=org.apache.hadoop.io.compress.SnappyCodec;\
set PARQUET_COMPRESSION_CODE=snappy;\
set mapred.max.split.size=10000000;\
insert into table ${hivetablename} partition(year,month,day) \
select *,year(RecoTime),month(RecoTime),day(RecoTime) from ${tempTabname};\
drop table ${tempTabname};"

  # Remove the staging directory left behind after the external table is dropped.
  hadoop fs -rm -r "${tempTabPath}"
  echo "load ${dstTabname} Data over"
  echo ""
  echo ""
  echo "------------------------------------------------------------------------------------------"
done
echo "load Data over"
echo "hive完成----------------------"
如果觉得内存溢出网站内容还不错,欢迎将内存溢出网站推荐给程序员好友。
欢迎分享,转载请注明来源:内存溢出
评论列表(0条)