tile() can create a 1D-or-higher tensor of zero or more repeated elements from a 0D-or-higher tensor of zero or more elements, as shown below:
*Memos:
- tile() can be used with torch or a tensor.
- The 1st argument (input) with torch, or the tensor the method is called on (Required-Type: tensor of int, float, complex or bool).
- The 2nd argument with torch, or the 1st or more arguments with a tensor, is dims (Required-Type: tuple of int, list of int or size()):
  *Memos:
  - If at least one dimension is 0, an empty tensor is returned.
  - dims= mustn't be used when passing the one or more dimensions without a tuple or list.
import torch

# Demonstrations of torch.tile(): repeat a tensor's elements along each
# dimension given by `dims`. Expected results are shown as comments.
# NOTE: the original text had bare `etc.` lines between examples, which are
# Python syntax errors; they are kept here as comments so the script runs.

my_tensor = torch.tensor([7, 4, 2])

# A 0 anywhere in dims yields an empty tensor.
torch.tile(input=my_tensor, dims=(0,))
my_tensor.tile(dims=(0,))
my_tensor.tile(0)
torch.tile(input=my_tensor, dims=torch.tensor([]).size())  # size() here is (0,)
# tensor([], dtype=torch.int64)

# Empty dims or dims of all 1s leave the tensor unchanged.
torch.tile(input=my_tensor, dims=())
torch.tile(input=my_tensor, dims=(1,))
torch.tile(input=my_tensor, dims=torch.tensor(5).size())    # size() here is ()
torch.tile(input=my_tensor, dims=torch.tensor([5]).size())  # size() here is (1,)
# tensor([7, 4, 2])

# Repeating along the last (only) dimension.
torch.tile(input=my_tensor, dims=(2,))
torch.tile(input=my_tensor, dims=torch.tensor([5, 8]).size())  # size() here is (2,)
# tensor([7, 4, 2, 7, 4, 2])
torch.tile(input=my_tensor, dims=(3,))
torch.tile(input=my_tensor, dims=torch.tensor([5, 8, 1]).size())  # size() here is (3,)
# tensor([7, 4, 2, 7, 4, 2, 7, 4, 2])
# etc.

# When dims has more entries than input has dimensions, new leading
# dimensions are prepended to the result.
torch.tile(input=my_tensor, dims=(1, 1))
torch.tile(input=my_tensor, dims=torch.tensor([[5]]).size())  # size() here is (1, 1)
# tensor([[7, 4, 2]])
torch.tile(input=my_tensor, dims=(1, 2))
torch.tile(input=my_tensor, dims=torch.tensor([[5, 8]]).size())  # (1, 2)
# tensor([[7, 4, 2, 7, 4, 2]])
torch.tile(input=my_tensor, dims=(1, 3))
torch.tile(input=my_tensor, dims=torch.tensor([[5, 8, 1]]).size())  # (1, 3)
# tensor([[7, 4, 2, 7, 4, 2, 7, 4, 2]])
# etc.

torch.tile(input=my_tensor, dims=(2, 1))
torch.tile(input=my_tensor, dims=torch.tensor([[5], [8]]).size())  # (2, 1)
# tensor([[7, 4, 2],
#         [7, 4, 2]])
torch.tile(input=my_tensor, dims=(2, 2))
torch.tile(input=my_tensor, dims=torch.tensor([[5, 8], [1, 9]]).size())  # (2, 2)
# tensor([[7, 4, 2, 7, 4, 2],
#         [7, 4, 2, 7, 4, 2]])
torch.tile(input=my_tensor, dims=(2, 3))
torch.tile(input=my_tensor, dims=torch.tensor([[5, 8, 1], [9, 3, 0]]).size())  # (2, 3)
# tensor([[7, 4, 2, 7, 4, 2, 7, 4, 2],
#         [7, 4, 2, 7, 4, 2, 7, 4, 2]])
# etc.

torch.tile(input=my_tensor, dims=(3, 1))
torch.tile(input=my_tensor, dims=torch.tensor([[5], [8], [1]]).size())  # (3, 1)
# tensor([[7, 4, 2],
#         [7, 4, 2],
#         [7, 4, 2]])
# etc.

torch.tile(input=my_tensor, dims=(1, 1, 1))
torch.tile(input=my_tensor, dims=torch.tensor([[[5]]]).size())  # (1, 1, 1)
# tensor([[[7, 4, 2]]])
# etc.

torch.tile(input=my_tensor, dims=(3, 2, 1))
# tensor([[[7, 4, 2], [7, 4, 2]],
#         [[7, 4, 2], [7, 4, 2]],
#         [[7, 4, 2], [7, 4, 2]]])

# A 0 dimension empties the result but the full shape is still reported.
torch.tile(input=my_tensor, dims=(3, 0, 1))
# tensor([], size=(3, 0, 3), dtype=torch.int64)

# 2D int tensor: dims=(3, 2) repeats 3x along dim 0 and 2x along dim 1.
my_tensor = torch.tensor([[7, 4, 2],
                          [5, 1, 6]])
torch.tile(input=my_tensor, dims=(3, 2))
# tensor([[7, 4, 2, 7, 4, 2],
#         [5, 1, 6, 5, 1, 6],
#         [7, 4, 2, 7, 4, 2],
#         [5, 1, 6, 5, 1, 6],
#         [7, 4, 2, 7, 4, 2],
#         [5, 1, 6, 5, 1, 6]])

# 2D float tensor.
my_tensor = torch.tensor([[7., 4., 2.],
                          [5., 1., 6.]])
torch.tile(input=my_tensor, dims=(3, 2))
# tensor([[7., 4., 2., 7., 4., 2.],
#         [5., 1., 6., 5., 1., 6.],
#         [7., 4., 2., 7., 4., 2.],
#         [5., 1., 6., 5., 1., 6.],
#         [7., 4., 2., 7., 4., 2.],
#         [5., 1., 6., 5., 1., 6.]])

# 2D complex tensor.
my_tensor = torch.tensor([[7.+0.j, 4.+0.j, 2.+0.j],
                          [5.+0.j, 1.+0.j, 6.+0.j]])
torch.tile(input=my_tensor, dims=(3, 2))
# tensor([[7.+0.j, 4.+0.j, 2.+0.j, 7.+0.j, 4.+0.j, 2.+0.j],
#         [5.+0.j, 1.+0.j, 6.+0.j, 5.+0.j, 1.+0.j, 6.+0.j],
#         [7.+0.j, 4.+0.j, 2.+0.j, 7.+0.j, 4.+0.j, 2.+0.j],
#         [5.+0.j, 1.+0.j, 6.+0.j, 5.+0.j, 1.+0.j, 6.+0.j],
#         [7.+0.j, 4.+0.j, 2.+0.j, 7.+0.j, 4.+0.j, 2.+0.j],
#         [5.+0.j, 1.+0.j, 6.+0.j, 5.+0.j, 1.+0.j, 6.+0.j]])

# 2D bool tensor.
my_tensor = torch.tensor([[True, False, True],
                          [False, True, False]])
torch.tile(input=my_tensor, dims=(3, 2))
# tensor([[True, False, True, True, False, True],
#         [False, True, False, False, True, False],
#         [True, False, True, True, False, True],
#         [False, True, False, False, True, False],
#         [True, False, True, True, False, True],
#         [False, True, False, False, True, False]])