20190729工作进展3

  1. graph_embedding.jl_jingyan_query_related_top_query_detailed
    取前两页, 保证正样本均衡, 1 : 2采样

  2. 正样本
    -- Positive samples: (query, clicked items) from page 1 of the latest
    -- partition of the top-query table. item_list / is_p4p_item_list are
    -- parallel comma-separated lists, exploded later by the UDTF below.
    create table hs_tmp_100 as
    select se_keyword, item_list, is_p4p_item_list from graph_embedding.jl_jingyan_query_related_top_query_detailed
    where page_seq =1 and ds=max_pt('graph_embedding.jl_jingyan_query_related_top_query_detailed');

按照两列两两分开

-- Register the Python UDTF (source pasted below in this log) that explodes
-- the two comma-separated list columns into one output row per item.
add py /home/hengsong/query_co_video/src/hs_udtf_729_1.py;
CREATE FUNCTION hs_split_2colume AS hs_udtf_729_1.Processor USING hs_udtf_729_1.py;

from odps.udf import BaseUDTF
from odps.distcache import get_cache_table
import string
import random
import sys

class Processor(BaseUDTF):
    """ODPS UDTF: explodes one (query, item_list, flag_list) input row into
    one output row per item.

    Args (per input row):
        se_keyword: the search query, emitted unchanged on every output row.
        item_list: comma-separated item ids.
        is_p4p_item_list: comma-separated flags, parallel to item_list.

    Forwards rows of shape (se_keyword, item_id, flag).
    """

    def __init__(self):
        pass

    def process(self, se_keyword, item_list, is_p4p_item_list):
        items = item_list.split(',')
        flags = is_p4p_item_list.split(',')
        # zip() guards against ragged input: the original indexed flags[i],
        # which raised IndexError whenever the flag list was shorter than
        # the item list; unpaired trailing items are now skipped instead.
        for item, flag in zip(items, flags):
            self.forward(se_keyword, item, flag)

-- Explode hs_tmp_100 into (se_keyword, item_id) pairs, keeping only
-- non-P4P items (flag = 'false'); DISTINCT removes duplicate pairs.
-- IF EXISTS replaces the interactive drop+confirm of the original log.
drop table if exists hs_tmp_101;
create table hs_tmp_101 as
select distinct se_keyword, item_id from
(select hs_split_2colume(se_keyword, item_list, is_p4p_item_list) as (se_keyword, item_id, flag) from hs_tmp_100) a
where a.flag = 'false';   -- '=' instead of the non-ANSI '=='

得到query表和title并编号,准备负采样:
-- Distinct queries and distinct items, then assign each query a surrogate id.
create table hs_tmp_102 as select distinct se_keyword from hs_tmp_101;
create table hs_tmp_103 as select distinct item_id from hs_tmp_101;
-- NOTE(review): row_number() over() has no ORDER BY, so query_id assignment
-- is nondeterministic between runs; acceptable here because the ids only
-- need to be unique within one pipeline run.
create table hs_tmp_104 as select row_number()over() as query_id, se_keyword from hs_tmp_102;

-- Titles for the sampled items, from the latest online item-dimension
-- partition. Rewritten as a LEFT JOIN from the item list (equivalent to
-- the original RIGHT JOIN with sides swapped): every sampled item is kept;
-- item_id/title come from the dimension side and are NULL when the item is
-- missing or offline, exactly as before.
drop table if exists hs_tmp_105;
create table hs_tmp_105 as
select b.item_id, b.title from
(select item_id from hs_tmp_103) a
left join
(select item_id, title from tbcdm.dim_tb_itm where ds=MAX_PT('tbcdm.dim_tb_itm') and is_online='Y') b
on a.item_id = b.item_id;

-- Alternative hs_tmp_105: (title, item_id) recovered from the video pool.
-- The item id may live under any of several entity keys, so take the first
-- non-null. BUG FIX: get_json_object paths must start with the '$' JSONPath
-- root; the original '.entities...' paths always return NULL.
-- NOTE(review): this definition conflicts with the hs_tmp_105 created from
-- tbcdm.dim_tb_itm above -- run only one of the two.
drop table if exists hs_tmp_105;
create table hs_tmp_105 as
select title,
       coalesce(
           get_json_object(body, '$.entities.k0.item_id/l'),
           get_json_object(body, '$.entities.k3.item_id/l'),
           get_json_object(body, '$.entities.k4.item_id/l'),
           get_json_object(body, '$.entities.k7.item_id/l')
       ) as item_id
from graph_embedding.jl_jingyan_query_related_video_pool
where ds=max_pt('graph_embedding.jl_jingyan_query_related_video_pool');

  1. 负采样 : hs_dssm_train_v2_0

-- Positive training pairs: (query_id, item_id, label = 1), mapping each
-- query string to its surrogate id via hs_tmp_104.
create table hs_dssm_train_v2_0 as
select b.query_id, a.item_id, 1 as label from
(select * from hs_tmp_101) a join (select * from hs_tmp_104) b
on a.se_keyword = b.se_keyword;   -- '=' instead of the non-ANSI '=='

-- Negative candidates: pair each positive item with a random query_id in
-- [1, 10000]. Executed twice -> roughly two negatives per positive
-- (the 1:2 sampling mentioned at the top of this log).
-- NOTE(review): assumes hs_tmp_104 has about 10000 distinct queries; random
-- ids above the real max query_id will never join back to a query -- confirm.
create table hs_tmp_106 as select int(rand() * 10000 + 1) as query_id, item_id from hs_dssm_train_v2_0;

insert into table hs_tmp_106 select int(rand() * 10000 + 1) as query_id, item_id from hs_dssm_train_v2_0;

去重: 从随机生成的负样本中剔除与正样本重合的 (query_id, item_id) 对

-- Mark random negatives that collide with a true positive pair: a non-NULL
-- query_idb means the pair also exists among the positives.
-- BUG FIX: the original joined hs_tmp_105 (item_id, title), which has no
-- query_id column; the positive (query_id, item_id) pairs live in
-- hs_dssm_train_v2_0.
create table hs_tmp_107 as select a.*, b.query_id as query_idb, b.item_id as item_idb from
(select * from hs_tmp_106) a
left join
(select * from hs_dssm_train_v2_0) b
on a.query_id = b.query_id and a.item_id = b.item_id;

合并
insert into table hs_dssm_train_v2_0 select query_id, item_id, 0 as label from hs_tmp_107 where query_idb is NULL;

乱序

-- Shuffle the training set: DISTRIBUTE BY random() scatters rows across
-- reducers. Done twice (v2_0 -> v2_1 -> v2_0) so the final table keeps its
-- original name while ending up shuffled.
drop table hs_dssm_train_v2_1;
yes
create table hs_dssm_train_v2_1 lifecycle 30 as select * from hs_dssm_train_v2_0 DISTRIBUTE by random();

drop table hs_dssm_train_v2_0;
yes
create table hs_dssm_train_v2_0 lifecycle 30 as select * from hs_dssm_train_v2_1 DISTRIBUTE by random();

mainse分词
hs_tmp_104:query_id, se_keyword
hs_tmp_105:item_id, title
hs_dssm_train_v2_0

-- Tokenize query text and item titles with the MAINSE word segmenter
-- (search_kg:alinlp_segment is a shared project UDF); output column
-- words_mainse_ws is a space-separated token string.
create table if not exists hs_tmp_108 LIFECYCLE 20 as select query_id, se_keyword, search_kg:alinlp_segment(se_keyword, "MAINSE", "0", "1") as words_mainse_ws from hs_tmp_104;

create table if not exists hs_tmp_109 LIFECYCLE 20 as select item_id, title, search_kg:alinlp_segment(title, "MAINSE", "0", "1") as words_mainse_ws from hs_tmp_105;

过滤

-- Build the dirty-word (noise) list: explode segmented queries into single
-- words, count frequencies, keep words seen fewer than 3 times as noise,
-- then append the pre-existing shared dirty-word list.
drop table hs_tmp_112;
yes
create table hs_tmp_112 as select bi_udf:bi_split_value(query_id, words_mainse_ws, " ") as (index, query_word) from hs_tmp_108;

drop table hs_tmp_113;
yes
create table hs_tmp_113 as select query_word, count(*) as freq from hs_tmp_112 group by query_word order by freq desc;

drop table hs_dirty_words_info_;
yes
-- Words with frequency < 3 are treated as noise.
create table hs_dirty_words_info_ as select query_word as words from hs_tmp_113 where freq < 3;

insert into table hs_dirty_words_info_ select * from zj_dirty_words_info_;

-- Remove dirty words from the segmented query (hs_tmp_108 -> hs_tmp_110)
-- and title (hs_tmp_109 -> hs_tmp_111) tables with PAI's FilterNoise
-- component, using the dirty-word list built above.
drop table hs_tmp_110;
yes
PAI -name FilterNoise -project algo_public
-DinputTableName=graph_embedding.hs_tmp_108
-DnoiseTableName=graph_embedding.hs_dirty_words_info_
-DoutputTableName=graph_embedding.hs_tmp_110
-DselectedColNames="words_mainse_ws"
-Dlifecycle=30;

drop table hs_tmp_111;
yes
PAI -name FilterNoise -project algo_public
-DinputTableName=graph_embedding.hs_tmp_109
-DnoiseTableName=graph_embedding.hs_dirty_words_info_
-DoutputTableName=graph_embedding.hs_tmp_111
-DselectedColNames="words_mainse_ws"
-Dlifecycle=30;

得到训练集
se_keyword_mainse_ws,title_mainse_ws,label

-- Assemble the training set (query tokens, title tokens, label): attach the
-- filtered segmented query and title text to each (query_id, item_id, label)
-- pair, then drop rows whose query text was entirely filtered away.
-- The original RIGHT JOIN is rewritten as the equivalent LEFT JOIN with the
-- sides swapped; '==' is replaced by standard '='.
create table hs_tmp_114
as select c.se_keyword_mainse_ws, d.words_mainse_ws as title_mainse_ws, c.label from
(
    select b.*, a.words_mainse_ws as se_keyword_mainse_ws
    from (select * from hs_dssm_train_v2_0) b
    left join (select * from hs_tmp_110) a
    on a.query_id = b.query_id
) c
left join (select * from hs_tmp_111) d on c.item_id = d.item_id;

-- Keep only rows whose query text survived noise filtering.
create table hs_tmp_115 as select * from hs_tmp_114 where lengthb(se_keyword_mainse_ws) > 0;

划分训练集和测试集:

-- 80/20 random split of hs_tmp_115 into train and test tables with the
-- PAI split component.
drop table hs_train_data_dssm_v2_1;
yes
drop table hs_test_data_dssm_v2_1;
yes
PAI -name split -project algo_public
-DinputTableName=graph_embedding.hs_tmp_115
-Doutput1TableName=graph_embedding.hs_train_data_dssm_v2_1
-Doutput2TableName=graph_embedding.hs_test_data_dssm_v2_1
-Dfraction=0.8
-DmemSizePerCore=4096
-DcoreNum=100
;

开始训练:

pai -name tensorflow140 -Dscript="file:///home/hengsong/origin_deep_cluster_odps_8.tar.gz" -DentryFile="train_v4.py" -Dcluster='{"worker":{"count":30, "cpu":200, "memory":4000}, "ps":{"count":10, "cpu":200, "memory":5000}}' -Dtables="odps://graph_embedding/tables/hs_train_data_dssm_v2_1,odps://graph_embedding/tables/hs_test_data_dssm_v2_1" -DcheckpointDir="oss://bucket-automl/hengsong/?role_arn=acs:ram::1293303983251548:role/graph2018&host=cn-hangzhou.oss-internal.aliyun-inc.com" -DuserDefinedParameters="--learning_rate=3e-4 --batch_size=1024 --is_save_model=True --attention_type=1 --num_epochs=10 --ckpt=hs_ugc_video_3e_1.ckpt" -DuseSparseClusterSchema=True;

http://logview.odps.aliyun-inc.com:8080/logview/?h=http://service-corp.odps.aliyun-inc.com/api&p=graph_embedding&i=20190729132851584gu8q4gep2_fb55558b_8cd4_4f0c_a35a_95abfa30098c&token=YkpwcnVva2pVTmdabU9SMC9yRmZ4Q2xTZWkwPSxPRFBTX09CTzoxMjkzMzAzOTgzMjUxNTQ4LDE1NjUwMTE3MzMseyJTdGF0ZW1lbnQiOlt7IkFjdGlvbiI6WyJvZHBzOlJlYWQiXSwiRWZmZWN0IjoiQWxsb3ciLCJSZXNvdXJjZSI6WyJhY3M6b2RwczoqOnByb2plY3RzL2dyYXBoX2VtYmVkZGluZy9pbnN0YW5jZXMvMjAxOTA3MjkxMzI4NTE1ODRndThxNGdlcDJfZmI1NTU1OGJfOGNkNF80ZjBjX2EzNWFfOTVhYmZhMzAwOThjIl19XSwiVmVyc2lvbiI6IjEifQ==

你可能感兴趣的:(20190729工作进展3)