Preliminaries
2.1 Data Manipulation
import torch
x = torch.arange(12)
print(x.shape)
print(x.size())  # size() returns the same information as the shape attribute
print(x.numel())
X = x.reshape(3, 4)
print(X)
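# reshape can infer one dimension automatically when it is given as -1; a quick sketch:
print(x.reshape(-1, 4))  # same 3x4 result, with the row count inferred
print(x.reshape(3, -1))  # equivalent, with the column count inferred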
print(torch.ones((2, 3, 4)))
print(torch.randn(3, 4))
print(torch.tensor([[2, 1, 3, 4], [1, 2, 3, 4], [3, 4, 5, 6]]))
x = torch.tensor([1.0, 2, 4, 8])
y = torch.tensor([2, 2, 2, 2])
print(x + y, x - y, x * y, x / y, x ** y)  # the ** operator performs exponentiation
print(torch.exp(x))
X = torch.arange(12, dtype=torch.float32).reshape((3,4))
Y = torch.tensor([[2.0, 1, 4, 3], [1, 2, 3, 4], [4, 3, 2, 1]])
print(torch.cat((X, Y), dim=0))
print(torch.cat((X, Y), dim=1))
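# Concatenating the (3, 4) tensors along dim=0 stacks rows, along dim=1 stacks columns:
print(torch.cat((X, Y), dim=0).shape)  # torch.Size([6, 4])
print(torch.cat((X, Y), dim=1).shape)  # torch.Size([3, 8])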
print(X == Y)
print(X.sum())
a = torch.arange(3).reshape((3, 1))
b = torch.arange(2).reshape((1, 2))
print(a)
print(b)
print(a+b)
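# a has shape (3, 1) and b has shape (1, 2); broadcasting virtually expands both to (3, 2)
# before adding elementwise. A sketch that makes the expansion explicit:
print(a.expand(3, 2) + b.expand(3, 2))  # same result as a + b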
print(X[-1])
print(X[1:3])
X[1, 2] = 9
print(X)
X[0:2, :] = 12
print(X)
before = id(Y)
Y = Y + X
print(id(Y) == before)
Z = torch.zeros_like(Y)
print('id(Z):', id(Z))
Z[:] = X + Y
print('id(Z):', id(Z))
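# Augmented assignment also updates a tensor in place instead of allocating a new one.
# A sketch on a throwaway copy W (introduced here only for illustration):
W = X.clone()
before = id(W)
W += Y
print(id(W) == before)  # True: W was modified in place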
A = X.numpy()
B = torch.tensor(A)
print(type(A))
print(type(B))
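# torch.tensor(A) copies the data, while torch.from_numpy(A) shares memory with the ndarray
# (just as X.numpy() above shares memory with X). A quick check:
C = torch.from_numpy(A)
A[0, 0] = 100
print(C[0, 0])  # reflects the change, because C shares A's memory
print(B[0, 0])  # unchanged, because B was created as a copy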
a = torch.tensor([3.5])
print(a)
print(a.item())
print(float(a))
print(int(a))
2.2 Data Preprocessing
import pandas as pd
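# data_file is never defined in these notes. A minimal sketch that creates it, assuming the
# house_tiny.csv example from Dive into Deep Learning (which these notes follow), so the
# lines below can run:
import os
os.makedirs(os.path.join('..', 'data'), exist_ok=True)
data_file = os.path.join('..', 'data', 'house_tiny.csv')
with open(data_file, 'w') as f:
    f.write('NumRooms,Alley,Price\n')  # column names
    f.write('NA,Pave,127500\n')        # each following row is one sample
    f.write('2,NA,106000\n')
    f.write('4,NA,178100\n')
    f.write('NA,NA,140000\n')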
data = pd.read_csv(data_file)
inputs, outputs = data.iloc[:, 0:2], data.iloc[:, 2]
inputs = inputs.fillna(inputs.mean(numeric_only=True))  # fill missing numeric values with the column mean
inputs = pd.get_dummies(inputs, dummy_na=True)  # one-hot encode categorical columns, treating NaN as its own category
print(inputs)
import torch
X = torch.tensor(inputs.to_numpy(dtype=float))
y = torch.tensor(outputs.to_numpy(dtype=float))
print(X)
print(y)
2.3 Linear Algebra
import torch
x = torch.tensor(3.0)
y = torch.tensor(2.0)
print(x + y)
print(x * y)
print(x / y)
print(x**y)
x = torch.arange(4)
print(x)
print(x[3])
print(len(x))
print(x.shape)
A = torch.arange(20).reshape(5, 4)
print(A)
print(A.T)
B = torch.tensor([[1, 2, 3], [2, 0, 4], [3, 4, 5]])
print(B)
print(B == B.T)
X = torch.arange(24).reshape(2, 3, 4)
print(X)
A = torch.arange(20, dtype=torch.float32).reshape(5, 4)
B = A.clone()  # allocate new memory and assign a copy of A to B
print(A)
print(A + B)
print(A * B)
a = 2
X = torch.arange(24).reshape(2, 3, 4)
print(a + X)
print((a * X).shape)
x = torch.arange(4, dtype=torch.float32)
print(x)
print(x.sum())
A_sum_axis0 = A.sum(axis=0)
print(A_sum_axis0)
print(A_sum_axis0.shape)
A_sum_axis1 = A.sum(axis=1)
print(A_sum_axis1)
print(A_sum_axis1.shape)
print(A.sum(axis=[0, 1]))
print(A.mean())
print(A.sum())
print(A.numel())
print(A.mean(axis=0), A.sum(axis=0) / A.shape[0])
sum_A = A.sum(axis=1, keepdims=True)
print(sum_A)
print(A / sum_A)
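# keepdims=True keeps the summed axis, so sum_A has shape (5, 1) and broadcasts against A's
# shape (5, 4); without it the row sums would have shape (5,) and the division would fail.
print(sum_A.shape)          # torch.Size([5, 1])
print(A.sum(axis=1).shape)  # torch.Size([5]): not broadcastable against (5, 4)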
print(A.cumsum(axis=0))
y = torch.ones(4, dtype=torch.float32)
print(x)
print(y)
print(torch.dot(x, y))
print(torch.sum(x * y))
print(A.shape)
print(x.shape)
print(torch.mv(A, x))
B = torch.ones(4, 3)
print(torch.mm(A, B))
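# torch.mm multiplies the (5, 4) matrix A by the (4, 3) matrix B, giving a (5, 3) result;
# the @ operator computes the same matrix product:
print(torch.mm(A, B).shape)  # torch.Size([5, 3])
print(A @ B)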
u = torch.tensor([3.0, -4.0])
print(torch.norm(u))
print(torch.abs(u).sum())
print(torch.norm(torch.ones((4, 9))))
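# For a matrix, torch.norm gives the Frobenius norm: the square root of the sum of squared
# elements, so for a 4x9 all-ones matrix it is sqrt(36) = 6. A quick check:
print(torch.sqrt(torch.sum(torch.ones((4, 9)) ** 2)))  # 6.0, matching torch.norm above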
From: https://www.cnblogs.com/everfight/p/18158369