Can I ask how to use the ReLU function with a tensor in PyTorch?

I'm studying deep learning as an electrical engineer, and I'm not familiar with Python.
Below is the code I practiced with. I want to use the ReLU function as the activation function,
but the terminal said that the ReLU function can't be used with a tensor.
Can I get some advice?

import torch
import torch.optim as optim
import torch.nn
import numpy as np
###### sigmoid -> needs to be changed to the ReLU function  (translated from Korean)
# Fix the RNG seed so the torch.rand initialisations below are reproducible.
# NOTE: the order of the torch.rand calls matters -- reordering them would
# draw different values from the RNG stream.
torch.manual_seed(1)
# XOR truth table: four 2-d input points and their 0/1 target labels.
x_data=torch.FloatTensor([[0,0],[0,1],[1,0],[1,1]])
y_data=torch.FloatTensor([[0],[1],[1],[0]])
# Parameters of a 2-10-1 network, initialised uniformly in [0, 1);
# requires_grad=True so autograd tracks them for the training loop below.
W_1=torch.rand([2,10],requires_grad=True)
W_2=torch.rand([10,1],requires_grad=True)
b_1=torch.rand([1,10],requires_grad=True)
b_2=torch.rand([1],requires_grad=True)

# --- Training loop: 2-10-1 network learning XOR with cross-entropy + SGD ---
#
# Fixes vs. the pasted code:
#   * the loop body is properly indented (the paste had it at top level,
#     which is a SyntaxError)
#   * the optimizer is created once, outside the loop, instead of being
#     rebuilt on every iteration
#   * the output activation is sigmoid, not tanh: tanh can be negative,
#     which makes log(H) / log(1 - H) NaN, and sigmoid matches result() below
#   * natural log (torch.log) instead of log10 for standard cross-entropy
#
# NOTE on the original question: to apply ReLU directly to a tensor, call
# torch.relu(t) or torch.nn.functional.relu(t). torch.nn.ReLU is a Module
# class -- it must be instantiated first (act = torch.nn.ReLU(); act(t)),
# which is why calling it straight on a tensor fails.
optimizer = optim.SGD([W_1, W_2, b_1, b_2], lr=0.05)
for i in range(100000):
    # Forward pass through the hidden and output layers.
    layer1 = torch.sigmoid(torch.matmul(x_data, W_1) + b_1)
    H = torch.sigmoid(torch.matmul(layer1, W_2) + b_2)
    # Binary cross-entropy, averaged over the four XOR samples.
    cost = torch.mean(-y_data * torch.log(H) - (1 - y_data) * torch.log(1 - H))
    # Backward pass and parameter update.
    optimizer.zero_grad()
    cost.backward()
    optimizer.step()
def result(n, m):
    """Run a forward pass on the input pair (n, m) and threshold the output.

    Uses the trained module-level parameters W_1, W_2, b_1, b_2.
    Returns 1 if the network's sigmoid output is >= 0.5, otherwise 0.
    """
    inputs = torch.FloatTensor([n, m])
    hidden = torch.sigmoid(torch.matmul(inputs, W_1) + b_1)
    output = torch.sigmoid(torch.matmul(hidden, W_2) + b_2)
    return 1 if output >= 0.5 else 0

# Evaluate the trained network on all four XOR input combinations.
for a, b in ((0, 0), (0, 1), (1, 0), (1, 1)):
    print(result(a, b))