Skip to content

Instantly share code, notes, and snippets.

@scopatz
Created December 6, 2018 20:19
Show Gist options
  • Save scopatz/244da3bf6ca7329954a3c8d6d4b148fe to your computer and use it in GitHub Desktop.
Sparse Traceback
MemoryError Traceback (most recent call last)
<ipython-input-9-9938bd9ed670> in <module>
----> 1 get_ipython().run_cell_magic('memit', '', "start_time = time.time()\nD, E = robust_pca(tensor,\n verbose=True,\n tol=1e-3)\nend_time = time.time()\ntotal_time = end_time - start_time\nprint('Took %d mins %d secs' % (divmod(total_time, 60)))\n")
~/miniconda/lib/python3.6/site-packages/IPython/core/interactiveshell.py in run_cell_magic(self, magic_name, line, cell)
2321 magic_arg_s = self.var_expand(line, stack_depth)
2322 with self.builtin_trap:
-> 2323 result = fn(magic_arg_s, cell)
2324 return result
2325
<decorator-gen-126> in memit(self, line, cell)
~/miniconda/lib/python3.6/site-packages/IPython/core/magic.py in <lambda>(f, *a, **k)
185 # but it's overkill for just that one bit of state.
186 def magic_deco(arg):
--> 187 call = lambda f, *a, **k: f(*a, **k)
188
189 if callable(arg):
~/miniconda/lib/python3.6/site-packages/memory_profiler.py in memit(self, line, cell)
1015 timeout=timeout, interval=interval,
1016 max_usage=True,
-> 1017 include_children=include_children)
1018 mem_usage.append(tmp[0])
1019
~/miniconda/lib/python3.6/site-packages/memory_profiler.py in memory_usage(proc, interval, timeout, timestamps, include_children, multiprocess, max_usage, retval, stream, backend)
334 # Therefore, the whole process hangs indefinitely. Here, we are ensuring that the process gets killed!
335 try:
--> 336 returned = f(*args, **kw)
337 parent_conn.send(0) # finish timing
338 ret = parent_conn.recv()
~/miniconda/lib/python3.6/site-packages/memory_profiler.py in _func_exec(stmt, ns)
788 # helper for magic_memit, just a function proxy for the exec
789 # statement
--> 790 exec(stmt, ns)
791
792
<string> in <module>
~/miniconda/lib/python3.6/site-packages/tensorly/contrib/sparse/core.py in inner(*args, **kwargs)
10 def inner(*args, **kwargs):
11 with using_sparse_backend():
---> 12 return func(*args, **kwargs)
13
14 return inner
~/miniconda/lib/python3.6/site-packages/tensorly/decomposition/robust_decomposition.py in robust_pca(X, mask, tol, reg_E, reg_J, mu_init, mu_max, learning_rate, n_iter_max, random_state, verbose)
92 for i in range(T.ndim(X)):
93 print(f"Partial SVD J[{i}]")
---> 94 J[i] = fold(svd_thresholding(unfold(D, i) + unfold(L[i], i)/mu, reg_J/mu), i, X.shape)
95 #try:
96 # J[i] = fold(svd_thresholding(unfold(D, i) + unfold(L[i], i)/mu, reg_J/mu), i, X.shape)
~/miniconda/lib/python3.6/site-packages/tensorly/tenalg/proximal.py in svd_thresholding(matrix, threshold)
68 procrustes : procrustes operator
69 """
---> 70 U, s, V = tl.partial_svd(matrix, n_eigenvecs=min(matrix.shape))
71 try:
72 return tl.dot(U, tl.reshape(soft_thresholding(s, threshold), (-1, 1))*V)
~/miniconda/lib/python3.6/site-packages/tensorly/backend/core.py in inner(*args, **kwargs)
823 def inner(*args, **kwargs):
824 backend = getattr(_STATE, 'backend', _DEFAULT_BACKEND)
--> 825 return getattr(backend, name)(*args, **kwargs)
826
827 # We don't use `functools.wraps` here because some of the dispatched
~/miniconda/lib/python3.6/site-packages/tensorly/contrib/sparse/backend/numpy_backend.py in partial_svd(matrix, n_eigenvecs)
126 S, U = scipy.sparse.linalg.eigsh(xxT, k=n_eigenvecs, which='LM')
127 S = np.sqrt(S)
--> 128 V = conj.dot(U / S[None, :])
129 else:
130 xTx = matrix.T.dot(matrix)
~/miniconda/lib/python3.6/site-packages/sparse/coo/core.py in dot(self, other)
1410 [ 6, 11]], dtype=int64)
1411 """
-> 1412 return dot(self, other)
1413
1414 def __matmul__(self, other):
~/miniconda/lib/python3.6/site-packages/sparse/coo/common.py in dot(a, b)
273 if b.ndim == 1:
274 b_axis = -1
--> 275 return tensordot(a, b, axes=(a_axis, b_axis))
276
277
~/miniconda/lib/python3.6/site-packages/sparse/coo/common.py in tensordot(a, b, axes)
157 at = a.transpose(newaxes_a).reshape(newshape_a)
158 bt = b.transpose(newaxes_b).reshape(newshape_b)
--> 159 res = _dot(at, bt)
160 if isinstance(res, scipy.sparse.spmatrix):
161 res = COO.from_scipy_sparse(res)
~/miniconda/lib/python3.6/site-packages/sparse/coo/common.py in _dot(a, b)
287 if isinstance(b, (COO, scipy.sparse.spmatrix)):
288 b = b.tocsc()
--> 289 return a.dot(b)
290
291
~/miniconda/lib/python3.6/site-packages/scipy/sparse/base.py in dot(self, other)
359
360 """
--> 361 return self * other
362
363 def power(self, n, dtype=None):
~/miniconda/lib/python3.6/site-packages/scipy/sparse/base.py in __mul__(self, other)
468 return self._mul_vector(other.ravel()).reshape(M, 1)
469 elif other.ndim == 2 and other.shape[0] == N:
--> 470 return self._mul_multivector(other)
471
472 if isscalarlike(other):
~/miniconda/lib/python3.6/site-packages/scipy/sparse/compressed.py in _mul_multivector(self, other)
467
468 result = np.zeros((M,n_vecs), dtype=upcast_char(self.dtype.char,
--> 469 other.dtype.char))
470
471 # csr_matvecs or csc_matvecs
MemoryError:
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment