Skip to content

Instantly share code, notes, and snippets.

@pjbull
Last active January 1, 2016 22:52
Show Gist options
  • Save pjbull/9ddb9d5ee403d9730724 to your computer and use it in GitHub Desktop.
Save pjbull/9ddb9d5ee403d9730724 to your computer and use it in GitHub Desktop.
Traceback Library
================================================
Failed to save <type 'numpy.ndarray'> to .npy file:
Traceback (most recent call last):
File "/home/ubuntu/anaconda/lib/python2.7/site-packages/sklearn/externals/joblib/numpy_pickle.py", line 275, in save
obj, filename = self._write_array(obj, filename)
File "/home/ubuntu/anaconda/lib/python2.7/site-packages/sklearn/externals/joblib/numpy_pickle.py", line 236, in _write_array
self.np.save(filename, array)
File "/home/ubuntu/anaconda/lib/python2.7/site-packages/numpy/lib/npyio.py", line 491, in save
pickle_kwargs=pickle_kwargs)
File "/home/ubuntu/anaconda/lib/python2.7/site-packages/numpy/lib/format.py", line 585, in write_array
array.tofile(fp)
IOError: 1946374200 requested and 1853623286 written
Traceback (most recent call last):
File "model.py", line 105, in <module>
run_staged()
File "model.py", line 92, in run_staged
sp.train(X, Y)
File "/mnt/biggiggy/ers-models/src/shared/__init__.py", line 183, in train
fit_model = grid.fit(X, this_Y)
File "/home/ubuntu/anaconda/lib/python2.7/site-packages/sklearn/grid_search.py", line 804, in fit
return self._fit(X, y, ParameterGrid(self.param_grid))
File "/home/ubuntu/anaconda/lib/python2.7/site-packages/sklearn/grid_search.py", line 553, in _fit
for parameters in parameter_iterable
File "/home/ubuntu/anaconda/lib/python2.7/site-packages/sklearn/externals/joblib/parallel.py", line 812, in __call__
self.retrieve()
File "/home/ubuntu/anaconda/lib/python2.7/site-packages/sklearn/externals/joblib/parallel.py", line 731, in retrieve
self._output.extend(job.get())
File "/home/ubuntu/anaconda/lib/python2.7/multiprocessing/pool.py", line 567, in get
raise self._value
IOError: [Errno 28] No space left on device
# FIXED: /run/shm was getting full; setting the JOBLIB_TEMP_FOLDER
# environment variable to a larger volume (e.g., /tmp) fixed it
================================================
================================================
Traceback (most recent call last):
File "src/models.py", line 205, in <module>
gridsearch_pipeline()
File "/home/ubuntu/anaconda2/lib/python2.7/site-packages/click/core.py", line 716, in __call__
return self.main(*args, **kwargs)
File "/home/ubuntu/anaconda2/lib/python2.7/site-packages/click/core.py", line 696, in main
rv = self.invoke(ctx)
File "/home/ubuntu/anaconda2/lib/python2.7/site-packages/click/core.py", line 889, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/ubuntu/anaconda2/lib/python2.7/site-packages/click/core.py", line 534, in invoke
return callback(*args, **kwargs)
File "src/models.py", line 187, in gridsearch_pipeline
fit_model = grid.fit(X, Y)
File "/home/ubuntu/anaconda2/lib/python2.7/site-packages/sklearn/grid_search.py", line 804, in fit
return self._fit(X, y, ParameterGrid(self.param_grid))
File "/home/ubuntu/anaconda2/lib/python2.7/site-packages/sklearn/grid_search.py", line 553, in _fit
for parameters in parameter_iterable
File "/home/ubuntu/anaconda2/lib/python2.7/site-packages/sklearn/externals/joblib/parallel.py", line 812, in __call__
self.retrieve()
File "/home/ubuntu/anaconda2/lib/python2.7/site-packages/sklearn/externals/joblib/parallel.py", line 731, in retrieve
self._output.extend(job.get())
File "/home/ubuntu/anaconda2/lib/python2.7/multiprocessing/pool.py", line 567, in get
raise self._value
struct.error: 'i' format requires -2147483648 <= number <= 2147483647
make: *** [models/339/rf_reg/model.pkl] Error 1
# FIXED: Looks like GridSearchCV fails once the training set is large enough:
# https://github.com/scikit-learn/scikit-learn/issues/3313#issuecomment-47373219
# Swap out GridSearchCV for another method, or train on a random subset
================================================
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment