Skip to content

Full implementation of collapse. Required implementation of tensor.from_tensor_type for tenmat input #32

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 20 additions & 15 deletions pyttb/tensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -94,12 +94,18 @@ def from_tensor_type(cls, source):
# CONVERSION
t = source.full()
return cls.from_data(t.data.copy(), t.shape)
elif isinstance(source, ttb.tenmat): # pragma: no cover
elif isinstance(source, ttb.tenmat):
# RESHAPE TENSOR-AS-MATRIX
# Here we just reverse what was done in the tenmat constructor.
# First we reshape the data to be an MDA, then we un-permute
# it using ipermute.
raise NotImplementedError
shape = source.tshape
order = np.hstack([source.rindices, source.cindices])
data = np.reshape(source.data.copy(), np.array(shape)[order], order='F')
if order.size > 1:
# data = ipermute(data, order)
data = np.transpose(data, np.argsort(order))
return cls.from_data(data, shape)

@classmethod
def from_function(cls, function_handle, shape):
Expand Down Expand Up @@ -127,7 +133,7 @@ def from_function(cls, function_handle, shape):
# Create the tensor
return cls.from_data(data, shape)

def collapse(self, dims=None, fun="sum"): # pragma: no cover
def collapse(self, dims=None, fun="sum"):
"""
Collapse tensor along specified dimensions.

Expand Down Expand Up @@ -159,23 +165,22 @@ def collapse(self, dims=None, fun="sum"): # pragma: no cover
else:
return fun(self.data.flatten('F'))

assert False, "collapse not implemented for arbitrary subset of dimensions; requires TENMAT class, which is not yet implemented"

## Calculate the size of the result
##newsize = self.shape[remdims]
#newsize = (self.shape[d] for d in remdims)
#print(newsize)
## Calculate the shape of the result
newshape = tuple(np.array(self.shape)[remdims])

## Convert to a matrix where each row is going to be collapsed
#A = ttb.tenmat(self, remdims, dims).double() # TODO depends on tenmat
A = ttb.tenmat.from_data(self.data, remdims, dims).double()

## Apply the collapse function
#B = np.zeros((A.shape[0], 1))
#for i in range(0, A.shape[0]):
# B[i] = fun(A[i, :])
B = np.zeros((A.shape[0], 1))
for i in range(0, A.shape[0]):
if fun == "sum":
B[i] = np.sum(A[i, :])
else:
B[i] = fun(A[i, :])

## Form and return the final result
#return ttb.tensor.from_tensor_type(ttb.tenmat(B, np.arange(0, np.prod(remdims)), np.array([]), newsize)) # TODO depends on tenmat
return ttb.tensor.from_data(B, newshape)

def contract(self, i, j):
"""
Expand Down Expand Up @@ -335,7 +340,7 @@ def innerprod(self, other):
#x = np.reshape(self.data, (1, self.data.size))
#y = np.reshape(other.data, (other.data.size, 1))
return x.dot(y)
elif isinstance(other, (ttb.ktensor, ttb.sptensor, ttb.ttensor)): # pragma: no cover
elif isinstance(other, (ttb.ktensor, ttb.sptensor, ttb.ttensor)):
# Reverse arguments and call specializer code
return other.innerprod(self)
else:
Expand Down
45 changes: 41 additions & 4 deletions tests/test_tensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,8 +97,9 @@ def test_tensor_initialization_from_data(sample_tensor_2way):


@pytest.mark.indevelopment
def test_tensor_initialization_from_tensor_type(sample_tensor_2way):
def test_tensor_initialization_from_tensor_type(sample_tensor_2way, sample_tensor_4way):
(params, tensorInstance) = sample_tensor_2way
(_, tensorInstance4) = sample_tensor_4way

# Copy Constructor
tensorCopy = ttb.tensor.from_tensor_type(tensorInstance)
Expand All @@ -116,6 +117,22 @@ def test_tensor_initialization_from_tensor_type(sample_tensor_2way):
assert (b.data == data).all()
assert (b.shape == shape)

# tenmat
tenmatInstance = ttb.tenmat.from_tensor_type(tensorInstance, np.array([0]))
tensorTenmatInstance = ttb.tensor.from_tensor_type(tenmatInstance)
assert tensorInstance.isequal(tensorTenmatInstance)

# 1D 1-element tenmat
tensorInstance1 = ttb.tensor.from_data(np.array([3]))
tenmatInstance1 = ttb.tenmat.from_tensor_type(tensorInstance1, np.array([0]))
tensorTenmatInstance1 = ttb.tensor.from_tensor_type(tenmatInstance1)
assert tensorInstance1.isequal(tensorTenmatInstance1)

# 4D tenmat
tenmatInstance4 = ttb.tenmat.from_tensor_type(tensorInstance4, np.array([3,0]))
tensorTenmatInstance4 = ttb.tensor.from_tensor_type(tenmatInstance4)
assert tensorInstance4.isequal(tensorTenmatInstance4)

@pytest.mark.indevelopment
def test_tensor_initialization_from_function():
def function_handle(x):
Expand Down Expand Up @@ -822,9 +839,29 @@ def test_tensor_collapse(sample_tensor_2way, sample_tensor_3way, sample_tensor_4
assert tensorInstance4.collapse() == 3321
assert tensorInstance4.collapse(fun=np.max) == 81

with pytest.raises(AssertionError) as excinfo:
tensorInstance2.collapse(np.array([0]))
assert "collapse not implemented for arbitrary subset of dimensions; requires TENMAT class, which is not yet implemented" in str(excinfo)
# single dimension collapse
data = np.array([5, 7, 9])
tensorCollapse = tensorInstance2.collapse(np.array([0]))
assert (tensorCollapse.data == data).all()

# single dimension collapse using max function
datamax = np.array([4, 5, 6])
tensorCollapseMax = tensorInstance2.collapse(np.array([0]), fun=np.max)
assert (tensorCollapseMax.data == datamax).all()

# multiple dimensions collapse
data4 = np.array([[ 99, 342, 585],
[126, 369, 612],
[153, 396, 639]])
tensorCollapse4 = tensorInstance4.collapse(np.array([0, 2]))
assert (tensorCollapse4.data == data4).all()

# multiple dimensions collapse
data4max = np.array([[21, 48, 75],
[24, 51, 78],
[27, 54, 81]])
tensorCollapse4Max = tensorInstance4.collapse(np.array([0, 2]), fun=np.max)
assert (tensorCollapse4Max.data == data4max).all()

@pytest.mark.indevelopment
def test_tensor_contract(sample_tensor_2way, sample_tensor_3way, sample_tensor_4way):
Expand Down