# 0_tensor.py
#%%
from __future__ import print_function
import torch
# %%
# Without initialization, the values are whatever happens to be in that memory
x = torch.empty(5, 3)
print(x)
# %%
# Random values
x = torch.rand(5, 3)
print(x)
#%%
x = torch.zeros(5, 3, dtype=torch.long)
print(x)
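#%%
# Small added check: every tensor carries a dtype and a device attribute
print(x.dtype)   # torch.int64 here, because we asked for torch.long
print(x.device)  # cpu, since no device was specified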
#%%
x = torch.tensor([5.5, 3])
print(x)
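#%%
# Small added example: torch.tensor also accepts nested lists for multi-dimensional data
m = torch.tensor([[1.0, 2.0], [3.0, 4.0]])
print(m, m.shape)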
#%%
x = x.new_ones(5, 3, dtype=torch.double) # new_* methods take in sizes
print(x)
x = torch.randn_like(x, dtype=torch.float) # override dtype!
print(x)
#%%
print(x.shape)
print(x.size())
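#%%
# Added check: size() returns a torch.Size, which is a subclass of tuple, so it can be unpacked
rows, cols = x.size()
print(rows, cols)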
#%%
y = torch.rand(5, 3)
print(x + y)
#%%
result = torch.empty(5, 3)
torch.add(x, y, out=result) # specify the output tensor
print(result)
#%%
print(x)
print(y)
y.add_(x) # add_ replaces y's values in place
print(y)
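#%%
# Small added illustration: by convention, any operation that mutates a tensor
# in place has a trailing underscore, e.g. copy_() and t_()
y.copy_(x)  # copy the values of x into y in place
y.t_()      # transpose y in place
print(y.shape)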
#%%
x = torch.randn(4, 4)
y = x.view(16) # view reshapes; note that x itself is unchanged
z = x.view(-1, 8) # -1 means "infer this dimension from the others"
print(x.shape, y.shape, z.shape)
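#%%
# Added note: reshape() is a more forgiving alternative to view(); it returns a view
# when the memory layout allows it and copies the data otherwise
w = x.reshape(2, 8)
print(w.shape)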
#%%
# A tensor created with view() points to the same memory, so assigning to y also changes x
x = torch.randn(4, 4)
print(x)
y = x.view(16)
y[0:2] = 0
print(y)
print(x)
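#%%
# Added check: clone() makes an independent copy, so writes to the copy no longer affect x
c = x.clone()
c[0, 0] = 100.0
print(c[0, 0], x[0, 0])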
#%%
x = torch.randn(1)
print(x)
print(x.item()) # extract a plain Python number; only works when the tensor holds a single value
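#%%
# Added example: for tensors with more than one element, tolist() converts to a (nested) Python list
v = torch.randn(2, 2)
print(v.tolist())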
#%%
a = torch.ones(5)
print(a, type(a))
b = a.numpy() # convert to a NumPy array
print(b, type(b))
#%%
a.add_(1)
print(a)
print(b) # a and b share memory, so changing a also changes b
# %%
import numpy as np
a = np.ones(5)
b = torch.from_numpy(a) # numpy -> torch
np.add(a, 1, out=a)
print(a)
print(b)
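#%%
# Added check: the sharing works in both directions; an in-place change on the
# torch tensor is visible in the NumPy array as well
b.add_(1)
print(a)
print(b)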
#%%
# If a GPU is available
if torch.cuda.is_available():
    device = torch.device("cuda")          # a CUDA device object
    y = torch.ones_like(x, device=device)  # directly create a tensor on GPU
    x = x.to(device)                       # or just use strings ``.to("cuda")``
    z = x + y
    print(z)
    print(z.to("cpu", torch.double))       # ``.to`` can also change the dtype at the same time!
# %%
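# Added sketch of a common device-agnostic pattern: use cuda when available, otherwise cpu
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
t = torch.ones(2, 2, device=device)
print(t.device)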