#!/bin/sh
# Load yesterday's tracking logs from the local log directories into the
# partitioned Hive table stage.tracklog, staging them through a temporary table.
yesterday=$(date --date='1 day ago' +%Y%m%d)
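
# Step 1: (re)create the temporary staging table whose columns mirror the
# space-delimited raw log layout.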
hive -e "
use stage;
create table if not exists tracklog_tmp (
dateday string,
datetime string,
ip string,
cookieid string,
userid string,
logserverip string,
referer string,
requesturl string,
remark1 string,
remark2 string,
alexaflag string,
ua string,
wirelessflag string
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ' ';"
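
# Step 2: for each log server directory, load yesterday's raw files into the
# staging table (overwriting the previous batch each time) and append them to
# yesterday's partition. The target table stage.tracklog is assumed to already
# exist and to be partitioned by day.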
loads=""
for logdir in tracklog_192.168.1.1 tracklog_192.168.1.2 tracklog_192.168.1.3 \
              trackloguc_192.168.1.1 trackloguc_192.168.1.2 trackloguc_192.168.1.3
do
    loads="${loads}
load data local inpath '/diskg/logs/${logdir}/${yesterday}/${yesterday}????.dat' overwrite into table tracklog_tmp;
insert into table tracklog PARTITION (day='${yesterday}') select * from tracklog_tmp;"
done

hive -e "
use stage;
set hive.enforce.bucketing=true;
set hive.exec.compress.output=true;
set mapred.output.compress=true;
set mapred.output.compression.codec=org.apache.hadoop.io.compress.GzipCodec;
set io.compression.codecs=org.apache.hadoop.io.compress.GzipCodec;
${loads}
"

# Step 3: drop the temporary staging table now that the loads are done.
hive -e "
use stage;
drop table if exists tracklog_tmp;"
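
# Optional sanity check (an assumption, not part of the original job): counting
# the rows just loaded into the new partition makes a silent load failure easy
# to spot in the job log. Uncomment to enable.
# hive -e "use stage; select count(*) from tracklog where day='${yesterday}';"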