add custom op demo
yangxudong committed Oct 17, 2023
1 parent e18caf0 commit e0a8a1d
Showing 7 changed files with 97 additions and 6 deletions.
17 changes: 11 additions & 6 deletions easy_rec/python/layers/backbone.py
@@ -57,7 +57,7 @@ def __init__(self, config, features, input_layer, l2_reg=None):
self._dag.add_node(block.name)
self._name_to_blocks[block.name] = block
layer = block.WhichOneof('layer')
if layer == 'input_layer':
if layer in {'input_layer', 'raw_input'}:
if len(block.inputs) != 1:
raise ValueError('input layer `%s` takes only one input' % block.name)
one_input = block.inputs[0]
@@ -73,8 +73,11 @@ def __init__(self, config, features, input_layer, l2_reg=None):
logging.warning('input `%s` already exists in other block' % group)
else:
input_feature_groups.add(group)
input_fn = EnhancedInputLayer(self._input_layer, self._features,
group, reuse)
if layer == 'input_layer':
input_fn = EnhancedInputLayer(self._input_layer, self._features,
group, reuse)
else:
input_fn = self._input_layer.get_raw_features(self._features, group)
self._name_to_layer[block.name] = input_fn
else:
self.define_layers(layer, block, block.name, reuse)
@@ -92,7 +95,7 @@ def __init__(self, config, features, input_layer, l2_reg=None):
num_pkg_input = 0
for block in config.blocks:
layer = block.WhichOneof('layer')
if layer == 'input_layer':
if layer in {'input_layer', 'raw_input'}:
continue
name = block.name
if name in input_feature_groups:
@@ -269,6 +272,8 @@ def call(self, is_training, **kwargs):
if layer is None: # identity layer
output = self.block_input(config, block_outputs, is_training)
block_outputs[block] = output
elif layer == 'raw_input':
block_outputs[block] = self._name_to_layer[block]
elif layer == 'input_layer':
input_fn = self._name_to_layer[block]
input_config = config.input_layer
@@ -327,10 +332,10 @@ def load_keras_layer(self, layer_conf, name, reuse=None):
else:
layer = layer_cls(params, name=name)
return layer, customize
elif param_type is None: # internal keras layer
elif param_type is None: # internal keras layer without extra params
layer = layer_cls(name=name)
return layer, customize
else:
else: # internal keras layer with extra params
assert param_type == 'st_params', 'internal keras layer only supports st_params'
try:
kwargs = convert_to_dict(layer_conf.st_params)
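Design note on the wiring above: for an `input_layer` block, `self._name_to_layer` stores a callable (`EnhancedInputLayer`) that is invoked later in `call()`, whereas for a `raw_input` block it stores the already-materialized list of tensors returned by `get_raw_features`, which `call()` returns as the block output directly. A runnable toy of that dispatch, with plain-Python stand-ins for the TF objects:

def get_raw_features(features, names):
  # the raw_input path: select tensors, nothing else
  return [features[n] for n in names]

features = {'a': 1.0, 'b': 2.0}
name_to_layer = {
    # 'input_layer' block: store a callable, evaluated later in call()
    'deep_in': lambda: [features['a'] * 10.0],
    # 'raw_input' block: store the materialized list right away
    'raw_in': get_raw_features(features, ['a', 'b']),
}
# in call(): callables are invoked, raw lists pass through unchanged
out = {k: (v() if callable(v) else v) for k, v in name_to_layer.items()}
assert out == {'deep_in': [10.0], 'raw_in': [1.0, 2.0]}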
15 changes: 15 additions & 0 deletions easy_rec/python/layers/input_layer.py
@@ -182,6 +182,21 @@ def get_sequence_feature(self, features, group_name):
self._embedding_regularizer, weights_list=embedding_reg_lst)
return seq_features

def get_raw_features(self, features, group_name):
"""Get features by group_name.
Args:
features: input tensor dict
group_name: feature_group name
Return:
features: all raw features in list
"""
assert group_name in self._feature_groups, 'invalid group_name[%s], list: %s' % (
group_name, ','.join([x for x in self._feature_groups]))
feature_group = self._feature_groups[group_name]
return [features[x] for x in feature_group.feature_names]

def __call__(self, features, group_name, is_combine=True, is_dict=False):
"""Get features by group_name.
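Unlike `__call__`, which builds (optionally combined) embedding features, `get_raw_features` is a pure pass-through: it returns the group's tensors untouched, in the order the feature group declares them. A runnable toy of that contract (the feature names are made up for the example):

import tensorflow as tf

# made-up stand-ins; in EasyRec these come from the parsed input pipeline
features = {
    'query': tf.constant(['red shoes']),
    'title': tf.constant(['red running shoes']),
    'price': tf.constant([9.9]),
}
feature_names = ['query', 'title']  # order declared by the feature group

# the essence of get_raw_features: no embedding, no concat, just selection
raw = [features[x] for x in feature_names]
assert len(raw) == 2 and raw[0] is features['query']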
1 change: 1 addition & 0 deletions easy_rec/python/layers/keras/__init__.py
@@ -3,6 +3,7 @@
from .blocks import Gate
from .blocks import Highway
from .bst import BST
from .custom_ops import EditDistance
from .data_augment import SeqAugment
from .din import DIN
from .fibinet import BiLinear
43 changes: 43 additions & 0 deletions easy_rec/python/layers/keras/custom_ops.py
@@ -0,0 +1,43 @@
# -*- encoding:utf-8 -*-
# Copyright (c) Alibaba, Inc. and its affiliates.
"""Convenience blocks for using custom ops."""
import logging
import os

import tensorflow as tf

import easy_rec

LIB_PATH = tf.sysconfig.get_link_flags()[0][2:]
LD_LIBRARY_PATH = os.getenv('LD_LIBRARY_PATH', '')  # default to '' so the join below works when the variable is unset
os.environ['LD_LIBRARY_PATH'] = ':'.join([LIB_PATH, LD_LIBRARY_PATH])
logging.info('set LD_LIBRARY_PATH=%s' % os.getenv('LD_LIBRARY_PATH'))


class EditDistance(tf.keras.layers.Layer):

def __init__(self, params, name='edit_distance', reuse=None, **kwargs):
super(EditDistance, self).__init__(name=name, **kwargs)  # pass name by keyword: the first positional arg of Layer.__init__ is `trainable`

custom_op_path = os.path.join(easy_rec.ops_dir, 'libedit_distance.so')
try:
custom_ops = tf.load_op_library(custom_op_path)
logging.info('load edit_distance op from %s succeed' % custom_op_path)
except Exception as ex:
  logging.warning('load edit_distance op from %s failed: %s' %
                  (custom_op_path, str(ex)))
  raise  # the layer is unusable without the shared library
self.edit_distance = custom_ops.edit_distance_op

def call(self, inputs, training=None, **kwargs):
input1, input2 = inputs[:2]
# densify the two sparse string inputs before invoking the custom op
str1 = tf.sparse.to_dense(input1, default_value='')
str2 = tf.sparse.to_dense(input2, default_value='')
dist = self.edit_distance(str1, str2, dtype=tf.float32)
# one distance value per example
return tf.reshape(dist, [-1, 1])
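A hypothetical usage sketch of the layer: it assumes the shared library was found under `easy_rec.ops_dir`, that `params` carries nothing this layer needs, and that the op emits one distance per row of the two densified string batches (the exact kernel semantics live in libedit_distance.so):

import tensorflow as tf
from easy_rec.python.layers.keras import EditDistance

layer = EditDistance(params=None)

# two batches of strings as SparseTensors, one row per example
input1 = tf.SparseTensor(
    indices=[[0, 0], [1, 0]], values=['kitten', 'flaw'], dense_shape=[2, 1])
input2 = tf.SparseTensor(
    indices=[[0, 0], [1, 0]], values=['sitting', 'lawn'], dense_shape=[2, 1])

dist = layer([input1, input2])  # expected shape: [batch_size, 1], float32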
Binary file added easy_rec/python/ops/1.12_pai/libedit_distance.so
Binary file not shown.
23 changes: 23 additions & 0 deletions easy_rec/python/ops/edit_distance_op.py
@@ -0,0 +1,23 @@
import logging
import os

import tensorflow as tf
from tensorflow.python.util.tf_export import tf_export

import easy_rec

custom_op_path = os.path.join(easy_rec.ops_dir, 'libedit_distance.so')
logging.info('custom op path: %s' % custom_op_path)

try:
  custom_ops = tf.load_op_library(custom_op_path)
  logging.info('load edit_distance op from %s succeed' % custom_op_path)
except Exception as ex:
  logging.warning('load edit_distance op from %s failed: %s' %
                  (custom_op_path, str(ex)))
  custom_ops = None


@tf_export('edit_distance')
def edit_distance(input1, input2):
  assert custom_ops is not None, 'libedit_distance.so failed to load'
  return custom_ops.edit_distance_op(input1, input2)
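For intuition, the quantity this op exposes is presumably the classic Levenshtein edit distance. A pure-Python reference implementation of the same idea (not the compiled kernel, which runs inside the TF graph):

def levenshtein(a, b):
  # prev[j] holds the distance between the current prefix of a and b[:j]
  prev = list(range(len(b) + 1))
  for i, ca in enumerate(a, 1):
    cur = [i]
    for j, cb in enumerate(b, 1):
      cur.append(min(prev[j] + 1,                 # delete ca
                     cur[j - 1] + 1,              # insert cb
                     prev[j - 1] + (ca != cb)))   # substitute ca -> cb
    prev = cur
  return prev[-1]

assert levenshtein('kitten', 'sitting') == 3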
4 changes: 4 additions & 0 deletions easy_rec/python/protos/backbone.proto
@@ -17,6 +17,9 @@ message InputLayer {
optional bool concat_seq_feature = 10 [default = true];
}

message RawInputLayer {
}

message Lambda {
required string expression = 1;
}
@@ -76,6 +79,7 @@ message Block {
KerasLayer keras_layer = 103;
RecurrentLayer recurrent = 104;
RepeatLayer repeat = 105;
RawInputLayer raw_input = 106;
}
}
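With `raw_input` added to the `layer` oneof, a backbone block can now pull a feature group's tensors through untouched, e.g. to feed string features straight into the EditDistance custom op. A hypothetical config fragment (the `inputs` and `keras_layer` field names follow EasyRec's existing backbone config conventions and may need adjusting to the actual schema):

blocks {
  name: 'raw_text'
  inputs {
    feature_group_name: 'text'  # a group with two string features
  }
  raw_input {
  }
}
blocks {
  name: 'edit_dist'
  inputs {
    block_name: 'raw_text'  # yields the two raw string tensors
  }
  keras_layer {
    class_name: 'EditDistance'
  }
}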

