#!/bin/bash
source /etc/profile
if [ -n "$2" ] ;then
do_date=$2
else
do_date=`date -d "-1 day" +%F`
fi
# HiveQL for new_ads.ads_ks_redeem_code_df (redeem-code / 兑换码 report).
# Pipeline (CTEs):
#   duihuan       - redeem events since 2021-10-01 for a fixed product-id
#                   whitelist, joined to the product dim for names;
#                   diffday = days elapsed since the redeem date.
#   story_prod    - product_id -> story_id mapping for the same whitelist.
#   banduzhu      - bandu (read-along) master: bandu_id -> story_id.
#   bandu_records - per-user daily read-along records; the UNION ALL exists
#                   because create_time was stored as a unix epoch (shifted
#                   +28800s, i.e. UTC+8) before 2021-11-11 10:55:00 and as a
#                   plain date string after — both normalized to yyyy-MM-dd.
# Final insert: per (user, redeem event), first record date, distinct
# punch-card days, and distinct story count, counting only records on or
# after the redeem date. Engine/merge 'set' options tune spark execution
# and small-file compaction; $do_date selects the dim-table snapshot.
ads_ks_redeem_code_df="
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
set hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
set mapred.job.queue.name=hive2;
set hive.exec.max.dynamic.partitions=2000;
set hive.execution.engine=spark;
SET hive.merge.mapredfiles=true;
SET hive.merge.mapfiles=true;
set hive.merge.size.per.task=1000000;
SET hive.merge.smallfiles.avgsize=1000000;
with duihuan as (select user_id
,a.product_id
,c.product_name
,batch_number -- 批次号
,substr(a.exchange_time,1,10) as exchange_time --兑换时间
,datediff(current_date(),to_date(a.exchange_time)) as diffday --兑换了第几天
from new_mid.mid_rights_exchange_di a -- 兑换码明细表
join new_dim.dim_produts_product_df c on a.product_id=c.product_id and c.dt = '$do_date'
WHERE to_date(a.exchange_time) >= '2021-10-01'
and a.product_id in ('4115','4104','4094','1666','4148','4213','4248','4249','4259','4260','4286','4283')
)
--故事--商品表
,story_prod as (select product_id
,story_id
from new_dim.dim_story_story_album_product_df
where product_id in ('4115','4104','4094','1666','4148','4213','4248','4249','4259','4260','4286','4283')
and dt = '$do_date'
group by product_id,story_id
)
--伴读主表
,banduzhu as (select bandu_id,
story_id
from new_dim.dim_story_ks_bandu_info_df where dt = '$do_date'
)
--伴读记录信息
,bandu_records as (select distinct substr(createtime,1,10) as createtime,
bandu_id,
user_id
from (select distinct from_unixtime(unix_timestamp(create_time)+28800,'yyyy-MM-dd') as createtime,
bandu_id,
user_id
from new_dim.dim_story_ks_bandu_records_df
where create_time >='2021-10-23 00:00:00' and create_time<= '2021-11-11 10:55:00' and dt = '$do_date'
union all
select distinct create_time as createtime,
bandu_id,
user_id
from new_dim.dim_story_ks_bandu_records_df
where create_time> '2021-11-11 10:55:00' and dt = '$do_date'
) t
)
insert overwrite table new_ads.ads_ks_redeem_code_df
select duihuan.user_id as user_id --用户ID
,duihuan.exchange_time as exchange_time --兑换时间
,duihuan.batch_number as batch_number --兑换码批次号
,min(bandu_records.createtime) as fst_my_production_time --第一次我的作品时间
,count(distinct bandu_records.createtime) as punch_card_days --打卡几天
,count(distinct banduzhu.story_id) as story_cts --不同诗词有多少首
,duihuan.product_id as product_id --商品ID
,duihuan.product_name as product_name --商品名称
,duihuan.diffday as diffday --兑换了第几天
from duihuan
join story_prod on duihuan.product_id =story_prod.product_id
join banduzhu on story_prod.story_id =banduzhu.story_id
join bandu_records on banduzhu.bandu_id =bandu_records.bandu_id and duihuan.user_id =bandu_records.user_id and duihuan.exchange_time <=bandu_records.createtime
group by duihuan.user_id, duihuan.exchange_time, duihuan.batch_number, duihuan.product_id, duihuan.product_name, duihuan.diffday
;
"
# Dispatch on the 1st CLI argument: run the named job, or every job for
# "all" (only one job is defined here, so both arms run the same SQL).
# Unknown or missing values fall through silently.
# Fixes: stray trailing '|' after 'esac' was a bash syntax error that
# aborted parsing of the whole script; non-standard "x"){ ... };; arm
# braces replaced with conventional case syntax; selector quoted.
case "$1" in
"ads_ks_redeem_code_df")
    hive -e "$ads_ks_redeem_code_df"
    ;;
"all")
    hive -e "$ads_ks_redeem_code_df"
    ;;
esac