82 changes: 82 additions & 0 deletions python/tvm/relay/frontend/paddlepaddle.py
@@ -1394,6 +1394,22 @@ def convert_one_hot_v2(g, op, block):
g.add_node(op.output("Out")[0], out)


def convert_p_norm(g, op, block):
"""Operator converter for p_norm."""

x = g.get_node(op.input("X")[0])
axis = op.attr("axis")
p = op.attr("porder")
keepdim = op.attr("keepdim")
p_node = _expr.const(p, dtype="float32")
abs_node = _op.abs(x)
pow_node = _op.power(abs_node, p_node)
reduce_sum = _op.sum(pow_node, axis=[axis], keepdims=keepdim)
p_node1 = _expr.const(1.0 / p, dtype="float32")
out = _op.power(reduce_sum, p_node1)
g.add_node(op.output("Out")[0], out)
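
The conversion above computes the p-norm as (sum(|x|^p, axis))^(1/p) via an abs -> power -> sum -> power chain. A minimal NumPy sketch of the equivalent computation, for reference only (the sample shape and p value are assumptions, not part of the diff):

    import numpy as np

    def p_norm_reference(x, p=2.0, axis=-1, keepdim=False):
        # |x|^p summed along `axis`, then the p-th root -- mirrors the
        # abs -> power -> sum -> power chain emitted by the converter.
        s = np.sum(np.abs(x) ** p, axis=axis, keepdims=keepdim)
        return s ** (1.0 / p)

    x = np.random.rand(2, 2, 3).astype("float32")
    print(p_norm_reference(x, p=1.5, axis=1, keepdim=True).shape)  # (2, 1, 3)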


def convert_padding(g, op, block):
"""Operator converter for padding."""

@@ -1645,6 +1661,30 @@ def convert_reshape(g, op, block):
g.add_node(op.output("Out")[0], out)


def convert_roi_align(g, op, block):
"""Operator converter for roi_align."""

rois = g.get_node(op.input("ROIs")[0])
spatial_scale = op.attr("spatial_scale")
if op.attr("aligned"):
offset = _expr.const(0.5, dtype="float32")
roi_offset = _op.divide(offset, _expr.const(spatial_scale, dtype="float32"))
rois = _op.subtract(rois, roi_offset)
num_rois = infer_shape(rois)[0]
zero_node = _expr.const(0, dtype="int32")
batch_index = _op.full(zero_node, [num_rois, 1], dtype="float32")
rois = _op.concatenate([batch_index, rois], axis=1)
out = _op.vision.roi_align(
g.get_node(op.input("X")[0]),
rois,
pooled_size=[op.attr("pooled_height"), op.attr("pooled_width")],
spatial_scale=spatial_scale,
sample_ratio=op.attr("sampling_ratio"),
mode="avg",
)
g.add_node(op.output("Out")[0], out)
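
Relay's vision.roi_align expects ROIs of shape (num_rois, 5) whose first column is the batch index, while Paddle's op passes boxes as (num_rois, 4); hence the zero batch-index column prepended above, and the 0.5 / spatial_scale shift when aligned is set. A small NumPy sketch of that ROI preprocessing, for illustration only (the sample box values are assumptions):

    import numpy as np

    def preprocess_rois(rois, spatial_scale, aligned):
        # rois: (num_rois, 4) boxes in [x1, y1, x2, y2] order.
        if aligned:
            rois = rois - 0.5 / spatial_scale  # undo the half-pixel offset
        batch_index = np.zeros((rois.shape[0], 1), dtype=rois.dtype)
        # Prepend the batch index so each ROI becomes [batch_idx, x1, y1, x2, y2].
        return np.concatenate([batch_index, rois], axis=1)

    boxes = np.array([[1.0, 1.0, 4.0, 5.0]], dtype="float32")
    print(preprocess_rois(boxes, spatial_scale=1.0, aligned=True))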


def convert_rnn(g, op, block):
"""Operator converter for rnn."""

@@ -2166,6 +2206,45 @@ def convert_softmax(g, op, block):
g.add_node(op.output("Out")[0], out)


def convert_softmax_with_cross_entropy(g, op, block):
"""Operator converter for softmax_with_cross_entropy."""

logits = g.get_node(op.input("Logits")[0])
labels = g.get_node(op.input("Label")[0])
ignore_index = op.attr("ignore_index")
axis = op.attr("axis")
if axis < 0:
axis = len(infer_shape(logits)) + axis

softmax = _op.nn.softmax(logits, axis=axis)

g.add_node(op.output("Softmax")[0], softmax)

softmax = _op.log(softmax)
soft_label = op.attr("soft_label")
if soft_label:
loss = _op.sum(-labels * softmax, axis=axis)
else:
labels_one = _op.one_hot(
labels,
on_value=_expr.const(1.0, dtype="float32"),
off_value=_expr.const(0.0, dtype="float32"),
depth=infer_shape(logits)[axis],
axis=axis + 1,
dtype="float32",
)
labels_one = _op.squeeze(labels_one, axis=axis)
loss = _op.sum(-labels_one * softmax, axis=axis)
loss = _op.expand_dims(loss, axis=axis)
if ignore_index != -100:  # only applies when soft_label is False
assert not soft_label, "soft_label and ignore_index cannot be set at the same time."
ignore_mask = _op.not_equal(labels, _expr.const(ignore_index, dtype="int64"))
ignore_mask = _op.cast(ignore_mask, "float32")
loss = _op.multiply(loss, ignore_mask)

g.add_node(op.output("Loss")[0], loss)
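
For hard labels the loss above is -log_softmax(logits) gathered at the label position along axis, with positions equal to ignore_index zeroed out; for soft labels it is -sum(label * log_softmax). A minimal NumPy sketch of the hard-label path, assuming axis=-1 and integer labels (shapes and values are assumptions, not part of the diff):

    import numpy as np

    def hard_label_ce(logits, labels, ignore_index=-100):
        # softmax -> log -> one-hot mask -> reduce, mirroring the Relay ops above.
        e = np.exp(logits - logits.max(axis=-1, keepdims=True))
        log_softmax = np.log(e / e.sum(axis=-1, keepdims=True))
        one_hot = (np.arange(logits.shape[-1]) == labels).astype("float32")
        loss = -(one_hot * log_softmax).sum(axis=-1, keepdims=True)
        if ignore_index != -100:
            loss = loss * (labels != ignore_index).astype("float32")
        return loss

    logits = np.random.rand(5, 3).astype("float32")
    labels = np.random.randint(0, 3, size=(5, 1))
    print(hard_label_ce(logits, labels).shape)  # (5, 1)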


def convert_softplus(g, op, block):
"""Operator converter for softplus."""

@@ -2556,6 +2635,7 @@ def convert_where_index(g, op, block):
"norm": convert_norm,
"not_equal": convert_elementwise_op,
"one_hot_v2": convert_one_hot_v2,
"p_norm": convert_p_norm,
"pad1d": convert_padding,
"pad2d": convert_padding,
"pad3d": convert_padding,
@@ -2568,6 +2648,7 @@
"relu6": convert_relu6,
"reshape2": convert_reshape,
"round": convert_unary_op,
"roi_align": convert_roi_align,
"reciprocal": convert_reciprocal,
"reduce_all": convert_reduce,
"reduce_any": convert_reduce,
@@ -2591,6 +2672,7 @@
"size": convert_size,
"slice": convert_slice,
"softmax": convert_softmax,
"softmax_with_cross_entropy": convert_softmax_with_cross_entropy,
"softplus": convert_softplus,
"softsign": convert_softsign,
"softshrink": convert_softshrink,
83 changes: 83 additions & 0 deletions tests/python/frontend/paddlepaddle/test_forward.py
@@ -2302,5 +2302,88 @@ def forward(self, x, y):
verify_model(Dist(), input_data=[y, v])


@tvm.testing.uses_gpu
def test_forward_p_norm():
class PNorm(nn.Layer):
def __init__(self, axis, keepdim, p=1):
super(PNorm, self).__init__()
self.p = p
self.axis = axis
self.keepdim = keepdim

@paddle.jit.to_static
def forward(self, input_data):
return paddle.norm(input_data, p=self.p, axis=self.axis, keepdim=self.keepdim)

input_data = paddle.rand((2, 2, 3), dtype="float32")
verify_model(PNorm(axis=0, keepdim=True), input_data=input_data)
verify_model(PNorm(axis=0, keepdim=False), input_data=input_data)
verify_model(PNorm(axis=1, keepdim=True, p=1.5), input_data=input_data)
verify_model(PNorm(axis=-1, keepdim=True, p=3.4), input_data=input_data)


@tvm.testing.uses_gpu
def test_forward_roi_align():
class RoiAlign(nn.Layer):
def __init__(self, spatial_scale=1.0, sampling_ratio=-1, aligned=False):
super(RoiAlign, self).__init__()
self.spatial_scale = spatial_scale
self.sampling_ratio = sampling_ratio
self.aligned = aligned

@paddle.jit.to_static
def forward(self, input_data, rois, rois_num):
return paddle.vision.ops.roi_align(
input_data, rois, rois_num, 3, self.spatial_scale, self.sampling_ratio, self.aligned
)

input_data = paddle.rand((1, 128, 32, 32), dtype="float32")
boxes = paddle.rand([3, 4])
boxes[:, 2] += boxes[:, 0] + 3
boxes[:, 3] += boxes[:, 1] + 4
boxes_num = paddle.to_tensor([3]).astype("int32")
verify_model(RoiAlign(), input_data=[input_data, boxes, boxes_num])
verify_model(RoiAlign(aligned=True), input_data=[input_data, boxes, boxes_num])
verify_model(
RoiAlign(spatial_scale=2.0, aligned=True), input_data=[input_data, boxes, boxes_num]
)


@tvm.testing.uses_gpu
def test_forward_softmax_with_cross_entropy():
class SoftmaxWithCrossEntropy(nn.Layer):
def __init__(self, soft_label=False, ignore_index=-100, return_softmax=False, axis=-1):
super(SoftmaxWithCrossEntropy, self).__init__()
self.soft_label = soft_label
self.ignore_index = ignore_index
self.return_softmax = return_softmax
self.axis = axis

@paddle.jit.to_static
def forward(self, input_data, label):
return paddle.nn.functional.softmax_with_cross_entropy(
input_data,
label,
soft_label=self.soft_label,
ignore_index=self.ignore_index,
return_softmax=self.return_softmax,
axis=self.axis,
)

input_data = paddle.rand([5, 3], dtype="float32")
label = paddle.randint(0, 2, [5, 1])
verify_model(SoftmaxWithCrossEntropy(), input_data=[input_data, label])
verify_model(SoftmaxWithCrossEntropy(return_softmax=True), input_data=[input_data, label])
verify_model(
SoftmaxWithCrossEntropy(return_softmax=True, ignore_index=1), input_data=[input_data, label]
)
input_data = paddle.rand([5, 4, 3], dtype="float32")
label = paddle.randint(0, 2, [5, 1, 3])
verify_model(SoftmaxWithCrossEntropy(axis=1), input_data=[input_data, label])
label = paddle.randint(0, 2, [5, 4, 3]).astype("float32")
verify_model(SoftmaxWithCrossEntropy(soft_label=True), input_data=[input_data, label])
verify_model(SoftmaxWithCrossEntropy(soft_label=True, axis=0), input_data=[input_data, label])


if __name__ == "__main__":
tvm.testing.main()