-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathMatOp.py
111 lines (91 loc) · 3.04 KB
/
MatOp.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
import torch
import torch.nn as nn
import numpy as np
class BatchScalar33MatMul(nn.Module):
    """Scale each 3x3 matrix in a batch by its per-sample scalar.

    ``scalar`` is expected as (B, 1) and ``mat`` as (B, 3, 3); the result
    has the same shape as ``mat``.
    """

    def __init__(self):
        super().__init__()

    def forward(self, scalar, mat):
        # (B, 1) -> (B, 1, 1), then stretch to mat's shape for the
        # elementwise product.
        stretched = scalar.unsqueeze(2).expand_as(mat)
        return stretched * mat
class GetIdentity(nn.Module):
    """Produce a batch of ``bn`` 3x3 identity matrices.

    The batch is placed on the GPU whenever CUDA is available, mirroring
    the device policy used by the other modules in this file.
    """

    def __init__(self):
        super().__init__()

    def forward(self, bn):
        eye = torch.eye(3, dtype=torch.float)
        if torch.cuda.is_available():
            eye = eye.cuda()
        # (3, 3) -> (1, 3, 3) -> (bn, 3, 3); repeat makes independent copies.
        return eye.reshape(1, 3, 3).repeat(bn, 1, 1)
class Batch33MatVec3Mul(nn.Module):
    """Batched matrix-vector product: (B, 3, 3) @ (B, 3) -> (B, 3)."""

    # BUG FIX: the constructor was misspelled ``__init`` (missing trailing
    # underscores), so it was dead code and ``super().__init__()`` was never
    # called through it; instantiation only worked because nn.Module's own
    # __init__ happened to be inherited.
    def __init__(self):
        super().__init__()

    def forward(self, mat, vec):
        vec = vec.unsqueeze(2)           # (B, 3) -> (B, 3, 1)
        result = torch.matmul(mat, vec)  # (B, 3, 1)
        return result.squeeze(2)         # back to (B, 3)
class GetSkew(nn.Module):
    """Build the 3x3 skew-symmetric (cross-product) matrix of each 3-vector.

    For ``w = (wx, wy, wz)`` the result satisfies ``skew(w) @ v == w x v``.
    Input ``dw`` is (B, 3); output is (B, 3, 3), on GPU when available.
    """

    def __init__(self):
        super().__init__()

    def forward(self, dw):
        batch = dw.shape[0]
        skew = torch.zeros((batch, 3, 3), dtype=torch.float)
        if torch.cuda.is_available():
            skew = skew.cuda()
        wx, wy, wz = dw[:, 0], dw[:, 1], dw[:, 2]
        # Upper triangle.
        skew[:, 0, 1] = -wz
        skew[:, 0, 2] = wy
        skew[:, 1, 2] = -wx
        # Lower triangle is the negated mirror.
        skew[:, 1, 0] = wz
        skew[:, 2, 0] = -wy
        skew[:, 2, 1] = wx
        return skew
class GetCovMatFromChol(nn.Module):
    """Expand packed Cholesky factors into covariance matrices Q = L @ L^T.

    ``chol_cov`` is (B, 6): the lower-triangular entries packed row-major,
    in the order (0,0), (1,0), (1,1), (2,0), (2,1), (2,2). The output
    (B, 3, 3) is symmetric positive semi-definite by construction.
    """

    def __init__(self):
        super().__init__()

    def forward(self, chol_cov):
        batch = chol_cov.shape[0]
        L = torch.zeros(batch, 3, 3, dtype=torch.float)
        LT = torch.zeros(batch, 3, 3, dtype=torch.float)
        if torch.cuda.is_available():
            L = L.cuda()
            LT = LT.cuda()
        # Unpack the 6 values into L and its transpose simultaneously.
        k = 0
        for row in range(3):
            for col in range(row + 1):
                L[:, row, col] = chol_cov[:, k]
                LT[:, col, row] = chol_cov[:, k]
                k += 1
        return torch.matmul(L, LT)
class GetCovMatFromChol_Sequence(nn.Module):
    """Sequence variant of GetCovMatFromChol: Q = L @ L^T per time step.

    ``chol_cov`` is (B, seq_len, 6) with the lower-triangular entries packed
    row-major: (0,0), (1,0), (1,1), (2,0), (2,1), (2,2). Returns
    (B, seq_len, 3, 3).
    """

    def __init__(self, seq_len):
        super().__init__()
        # Number of time steps expected in the input's second dimension.
        self.seq_len = seq_len

    def forward(self, chol_cov):
        batch = chol_cov.shape[0]
        L = torch.zeros(batch, self.seq_len, 3, 3, dtype=torch.float)
        LT = torch.zeros(batch, self.seq_len, 3, 3, dtype=torch.float)
        if torch.cuda.is_available():
            L = L.cuda()
            LT = LT.cuda()
        # Unpack the 6 packed values into L and its transpose for every step.
        k = 0
        for row in range(3):
            for col in range(row + 1):
                L[:, :, row, col] = chol_cov[:, :, k]
                LT[:, :, col, row] = chol_cov[:, :, k]
                k += 1
        return torch.matmul(L, LT)
if __name__ == '__main__':
    # Manual smoke test: batched 3x3 inverse of two matrices.
    mat1 = np.array([[[1, 2, 3], [4, 1, 6], [7, 8, 1]],
                     [[1, 12, 13], [14, 1, 16], [17, 18, 1]]], dtype=np.float32)
    mat2 = -np.array([[[1, 2, 3], [4, 5, 6], [7, 8, 9]],
                      [[11, 12, 13], [14, 15, 16], [17, 18, 19]]], dtype=np.float32)
    # BUG FIX: the original called .cuda() unconditionally, which raises on
    # CPU-only machines. Select the device like the modules above do.
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    mat1 = torch.from_numpy(mat1).to(device)
    mat2 = torch.from_numpy(mat2).to(device)
    print(mat1)
    print(mat1.shape)
    # print(torch.transpose(mat1, dim0=2, dim1=1))
    # ``mat1[:,]`` in the original was a no-op full slice; pass mat1 directly.
    invMat1 = torch.inverse(mat1)
    print(invMat1)