forked from tensorflow/tensorflow
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathlinalg_ops.py
More file actions
148 lines (121 loc) · 5.14 KB
/
linalg_ops.py
File metadata and controls
148 lines (121 loc) · 5.14 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Operations for linear algebra."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import gen_linalg_ops
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_linalg_ops import *
# pylint: enable=wildcard-import
@ops.RegisterShape("Cholesky")
def _CholeskyShape(op):
  """Shape function for Cholesky: the factor has the same shape as the input."""
  matrix_shape = op.inputs[0].get_shape().with_rank(2)
  # Cholesky factorization is only defined for square matrices.
  matrix_shape[0].assert_is_compatible_with(matrix_shape[1])
  return [matrix_shape]
@ops.RegisterShape("BatchCholesky")
def _BatchCholeskyShape(op):
  """Shape function for BatchCholesky: output shape equals the input shape."""
  batch_shape = op.inputs[0].get_shape().with_rank_at_least(3)
  # Every matrix in the batch must be square.
  batch_shape[-1].assert_is_compatible_with(batch_shape[-2])
  return [batch_shape]
@ops.RegisterShape("MatrixDeterminant")
def _MatrixDeterminantShape(op):
  """Shape function for MatrixDeterminant: a scalar for a square matrix."""
  matrix_shape = op.inputs[0].get_shape().with_rank(2)
  # The determinant is only defined for square matrices.
  matrix_shape[0].assert_is_compatible_with(matrix_shape[1])
  # NOTE(review): after with_rank(2) the rank is presumably always known, so
  # the unknown-shape fallback below may be unreachable — kept for safety.
  if matrix_shape.ndims is None:
    return [tensor_shape.unknown_shape()]
  return [tensor_shape.scalar()]
@ops.RegisterShape("BatchMatrixDeterminant")
def _BatchMatrixDeterminantShape(op):
  """Shape function for BatchMatrixDeterminant: one scalar per batch matrix."""
  batch_shape = op.inputs[0].get_shape().with_rank_at_least(3)
  # Every matrix in the batch must be square.
  batch_shape[-1].assert_is_compatible_with(batch_shape[-2])
  # NOTE(review): with_rank_at_least presumably guarantees a known rank here,
  # so the fallback below may be unreachable — kept for safety.
  if batch_shape.ndims is None:
    return [tensor_shape.unknown_shape()]
  # The output carries only the batch dimensions.
  return [batch_shape[:-2]]
@ops.RegisterShape("MatrixInverse")
def _MatrixInverseShape(op):
  """Shape function for MatrixInverse: the inverse has the input's shape."""
  matrix_shape = op.inputs[0].get_shape().with_rank(2)
  # Only square matrices are invertible.
  matrix_shape[0].assert_is_compatible_with(matrix_shape[1])
  return [matrix_shape]
@ops.RegisterShape("BatchMatrixInverse")
def _BatchMatrixInverseShape(op):
  """Shape function for BatchMatrixInverse: output shape equals input shape."""
  batch_shape = op.inputs[0].get_shape().with_rank_at_least(3)
  # Every matrix in the batch must be square.
  batch_shape[-1].assert_is_compatible_with(batch_shape[-2])
  return [batch_shape]
@ops.RegisterShape("SelfAdjointEig")
def _SelfAdjointEigShape(op):
  """Shape function for SelfAdjointEig.

  For a [d, d] input the op packs eigenvalues and eigenvectors into a single
  [d+1, d] output, so the output has one extra row.
  """
  matrix_shape = op.inputs[0].get_shape().with_rank(2)
  # The eigendecomposition is only defined for square matrices.
  matrix_shape[0].assert_is_compatible_with(matrix_shape[1])
  d = matrix_shape.dims[0]
  return [tensor_shape.TensorShape([d + 1, d])]
@ops.RegisterShape("BatchSelfAdjointEig")
def _BatchSelfAdjointEigShape(op):
  # Shape function for BatchSelfAdjointEig: for [..., d, d] input the output
  # is [..., d+1, d] — eigenvalues and eigenvectors packed with one extra row
  # per matrix (mirrors the non-batch SelfAdjointEig shape function above).
  input_shape = op.inputs[0].get_shape().with_rank_at_least(3)
  # The matrices in the batch must be square.
  input_shape[-1].assert_is_compatible_with(input_shape[-2])
  # NOTE(review): if .dims returns the shape's internal list (rather than a
  # copy), the in-place `dlist[-2] += 1` below would mutate input_shape too —
  # confirm against the TensorShape implementation before restructuring.
  dlist = input_shape.dims
  dlist[-2] += 1
  out_shape = tensor_shape.TensorShape(dlist)
  return [out_shape]
@ops.RegisterShape("MatrixSolve")
def _MatrixSolveShape(op):
  """Shape function for MatrixSolve: [M, M] x = rhs [M, K] gives x of [M, K]."""
  matrix_shape = op.inputs[0].get_shape().with_rank(2)
  rhs_shape = op.inputs[1].get_shape().with_rank_at_least(2)
  # The coefficient matrix must be square.
  matrix_shape[0].assert_is_compatible_with(matrix_shape[1])
  # The right-hand side must have the same number of rows as the matrix.
  matrix_shape[0].assert_is_compatible_with(rhs_shape[0])
  return [[matrix_shape[0], rhs_shape[1]]]
@ops.RegisterShape("BatchMatrixSolve")
def _BatchMatrixSolveShape(op):
  """Shape function for BatchMatrixSolve.

  For lhs of shape [..., M, M] and rhs of shape [..., M, K], the solution has
  shape [..., M, K].
  """
  lhs_shape = op.inputs[0].get_shape().with_rank_at_least(3)
  rhs_shape = op.inputs[1].get_shape().with_rank_at_least(3)
  # The matrices must be square.
  lhs_shape[-1].assert_is_compatible_with(lhs_shape[-2])
  # The matrices and right-hand sides in the batch must have the same number
  # of rows.
  lhs_shape[-2].assert_is_compatible_with(rhs_shape[-2])
  # BUG FIX: the output must keep the row dimension M. The previous
  # `lhs_shape[:-2]` dropped it, producing [..., K] (rank one too small).
  # `lhs_shape[:-1]` is [..., M] since lhs is square; appending the rhs column
  # count yields [..., M, K].
  return [lhs_shape[:-1].concatenate(rhs_shape[-1])]
@ops.RegisterShape("MatrixTriangularSolve")
def _MatrixTriangularSolveShape(op):
  """Shape function for MatrixTriangularSolve: output matches the rhs shape."""
  matrix_shape = op.inputs[0].get_shape().with_rank(2)
  rhs_shape = op.inputs[1].get_shape().with_rank_at_least(2)
  # The triangular matrix must be square.
  matrix_shape[0].assert_is_compatible_with(matrix_shape[1])
  # The right-hand side must have the same number of rows as the matrix.
  matrix_shape[0].assert_is_compatible_with(rhs_shape[0])
  return [rhs_shape]
@ops.RegisterShape("BatchMatrixTriangularSolve")
def _BatchMatrixTriangularSolveShape(op):
  """Shape function for BatchMatrixTriangularSolve: output matches rhs shape."""
  matrix_shape = op.inputs[0].get_shape().with_rank_at_least(3)
  rhs_shape = op.inputs[1].get_shape().with_rank_at_least(3)
  # Every matrix in the batch must be square.
  matrix_shape[-1].assert_is_compatible_with(matrix_shape[-2])
  # Each right-hand side must have the same number of rows as its matrix.
  matrix_shape[-2].assert_is_compatible_with(rhs_shape[-2])
  return [rhs_shape]