Skip to content

Commit e5f46e4

Browse files
committed
relu:实现 (relu: implementation)
1 parent 7209987 commit e5f46e4

10 files changed

Lines changed: 93 additions & 20 deletions

File tree

excuter/op-mem-ompsimd/src/deepx/op/opfactory.cpp

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@ namespace deepx::op
1717
opfactory.add_op(NewTensor<int64_t>());
1818
opfactory.add_op(NewTensor<float>());
1919
opfactory.add_op(NewTensor<double>());
20-
2120
opfactory.add_op(ArgSet<int32_t>());
2221
opfactory.add_op(ArgSet<float>());
2322
opfactory.add_op(ArgSet<double>());
@@ -124,6 +123,7 @@ namespace deepx::op
124123
register_new(opfactory);
125124
register_init(opfactory);
126125
register_print(opfactory);
126+
register_transpose(opfactory);
127127
register_elementwise_op(opfactory);
128128
register_concat(opfactory);
129129
register_matmul(opfactory);

front/py/deepx/__init__.py

Lines changed: 15 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,23 @@
11
from .tensor import Tensor,Shape,Device,DeviceType
2-
from deepx.nn.functional import full,zeros,ones,arange,rand,randn,eye
3-
from deepx.nn.functional import add,sub,mul,div,clamp
4-
from deepx.nn.functional import matmul
5-
from deepx.nn.functional import max,min,sum,prod,mean
2+
from deepx.nn.functional import *
63
__all__ = [
4+
#tensor
75
'Tensor',
86
'Shape',
97
'Device','DeviceType',
10-
#init
11-
'full','zeros', 'ones', 'arange', 'rand', 'randn', 'eye',
12-
#elementwise
13-
"add","sub","mul","div","clamp",
14-
#matmul
15-
"matmul",
16-
#reduce
17-
"max","min","sum","prod","mean",
8+
#nn.functional
9+
#init
10+
'full','zeros', 'ones', 'arange', 'rand', 'randn', 'eye',
11+
#elementwise
12+
"add","sub","mul","div","clamp",
13+
#matmul
14+
"matmul",
15+
#reduce
16+
"max","min","sum","prod","mean",
17+
#transpose
18+
"transpose",
19+
#relu
20+
"relu",
1821
]
1922

2023
# 为了支持 import deepx as dx 的用法

front/py/deepx/nn/functional/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
from .init import constant,full,zeros,ones,arange,rand,randn,eye
66
from .reduce import max,min,sum,prod,mean
77
from .transpose import transpose
8+
from .activite import relu
89

910
__all__ = [
1011
"newtensor",
@@ -14,4 +15,5 @@
1415
"matmul",
1516
"max","min","sum","prod","mean",
1617
"transpose",
18+
"relu",
1719
]
front/py/deepx/nn/functional/activite.py  (new file — filename missing from the scrape; inferred from the `from .activite import relu` line in functional/__init__.py)

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
from deepx.tensor import Tensor
2+
from deepx.nn.deepxir import DeepxIR
3+
from deepx.scheduler import send
4+
5+
def relu(t: Tensor,inplace:bool=False)->Tensor:
6+
out=t
7+
if not inplace:
8+
out=Tensor(shape=t.shape, dtype=t.dtype, device=t.device)
9+
ir=DeepxIR("max_scalar",t.dtype,[t._node.name,0], [out._node.name])
10+
send(str(ir))
11+
return out
12+

front/py/deepx/nn/functional/transpose.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,11 @@
55
def transpose(t: Tensor,dimorder:list[int]=None,out:Tensor=None):
66
if dimorder is None:
77
dimorder=list(range(t.ndimension))
8-
ir=DeepxIR("transpose",'any',[t._node.name,*map(str, dimorder)], [out._node.name])
8+
9+
if out is None:
10+
out=Tensor(shape=t.Shape.transpose(dimorder), dtype=t.dtype, device=t.device)
11+
ir=DeepxIR("transpose",'',[t._node.name,*map(str, dimorder)], [out._node.name])
912
send(str(ir))
13+
1014
return out
1115

front/py/deepx/nn/modules — new ReLU module file (filename missing from the scrape; presumably something like nn/modules/activite.py, given `from .module import Module` — verify against the repository)

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
from deepx.tensor import Tensor
2+
import deepx.nn.functional as F
3+
from .module import Module
4+
5+
#copy from pytorch
6+
class ReLU(Module):
7+
r"""Applies the rectified linear unit function element-wise.
8+
9+
:math:`\text{ReLU}(x) = (x)^+ = \max(0, x)`
10+
11+
Args:
12+
inplace: can optionally do the operation in-place. Default: ``False``
13+
14+
Shape:
15+
- Input: :math:`(*)`, where :math:`*` means any number of dimensions.
16+
- Output: :math:`(*)`, same shape as the input.
17+
18+
.. image:: ../scripts/activation_images/ReLU.png
19+
20+
Examples::
21+
22+
>>> m = nn.ReLU()
23+
>>> input = torch.randn(2)
24+
>>> output = m(input)
25+
26+
27+
An implementation of CReLU - https://arxiv.org/abs/1603.05201
28+
29+
>>> m = nn.ReLU()
30+
>>> input = torch.randn(2).unsqueeze(0)
31+
>>> output = torch.cat((m(input), m(-input)))
32+
"""
33+
34+
__constants__ = ["inplace"]
35+
inplace: bool
36+
37+
def __init__(self, inplace: bool = False):
38+
super().__init__()
39+
self.inplace = inplace
40+
41+
def forward(self, input: Tensor) -> Tensor:
42+
return F.relu(input, inplace=self.inplace)
43+
44+
def extra_repr(self) -> str:
45+
inplace_str = "inplace=True" if self.inplace else ""
46+
return inplace_str

front/py/deepx/tensor/shape.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -85,4 +85,8 @@ def __hash__(self):
8585
"""使Shape可哈希,便于在字典和集合中使用"""
8686
return hash(self.shape)
8787

88+
def transpose(self,dimorder:list[int]=None):
89+
if dimorder is None:
90+
dimorder=list(range(self.ndimension))
91+
return Shape(tuple(self.shape[i] for i in dimorder))
8892

front/py/deepx/tensor/tensor.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -54,6 +54,9 @@ def __init__(
5454
@property
5555
def shape(self):
5656
return self._shape.shape
57+
@property
58+
def Shape(self):
59+
return self._shape
5760

5861
@property
5962
def stride(self):

front/py/deepx/tensor/transpose.py

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,13 @@
11
from .tensor import Tensor,tensor_method
22

33
@tensor_method
4-
def transpose(self,dimorder:list[int]=None, out:Tensor=None):
5-
result = Tensor(dtype=self.dtype,shape=self.shape)
4+
def transpose(self,*axes):
65
from deepx.nn.functional import transpose as transpose_func
7-
transpose_func(self,dimorder,result)
6+
result=transpose_func(self,axes)
87
return result
98

109
@tensor_method
11-
def transpose_(self,dimorder:list[int]=None):
10+
def transpose_(self,*axes):
1211
from deepx.nn.functional import transpose as transpose_func
13-
transpose_func(self,dimorder,self)
12+
transpose_func(self,axes,self)
1413
return self

front/py/examples/2_ir/4_transpose.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,5 +3,5 @@
33
print()
44

55
t1 = ones([3,4],dtype='float32')
6-
t2=t1.transpose(dimorder=[1,0])
6+
t2=t1.transpose(1,0)
77
print(t2)

0 commit comments

Comments
 (0)