@shoyer · Last active November 2, 2017
TypeError Traceback (most recent call last)
~/conda/envs/xarray-py36/lib/python3.6/site-packages/distributed/protocol/pickle.py in dumps(x)
37 try:
---> 38 result = pickle.dumps(x, protocol=pickle.HIGHEST_PROTOCOL)
39 if len(result) < 1000:
TypeError: can't pickle netCDF4._netCDF4.Variable objects
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-1-c9532d52e2b9> in <module>()
3 client = Client(processes=False)
4 ds = xarray.Dataset({"foo": ('x', [1, 2, 3])}).chunk()
----> 5 ds.to_netcdf('test.nc')
~/dev/xarray/xarray/core/dataset.py in to_netcdf(self, path, mode, format, group, engine, encoding, unlimited_dims)
1045 return to_netcdf(self, path, mode, format=format, group=group,
1046 engine=engine, encoding=encoding,
-> 1047 unlimited_dims=unlimited_dims)
1048
1049 def __unicode__(self):
~/dev/xarray/xarray/backends/api.py in to_netcdf(dataset, path_or_file, mode, format, group, engine, writer, encoding, unlimited_dims)
616 try:
617 dataset.dump_to_store(store, sync=sync, encoding=encoding,
--> 618 unlimited_dims=unlimited_dims)
619 if path_or_file is None:
620 return target.getvalue()
~/dev/xarray/xarray/core/dataset.py in dump_to_store(self, store, encoder, sync, encoding, unlimited_dims)
984 unlimited_dims=unlimited_dims)
985 if sync:
--> 986 store.sync()
987
988 def to_netcdf(self, path=None, mode='w', format=None, group=None,
~/dev/xarray/xarray/backends/netCDF4_.py in sync(self)
366 def sync(self):
367 with self.ensure_open(autoclose=True):
--> 368 super(NetCDF4DataStore, self).sync()
369 self.ds.sync()
370
~/dev/xarray/xarray/backends/common.py in sync(self)
200
201 def sync(self):
--> 202 self.writer.sync()
203
204 def store_dataset(self, dataset):
~/dev/xarray/xarray/backends/common.py in sync(self)
177 import dask
178 if LooseVersion(dask.__version__) > LooseVersion('0.8.1'):
--> 179 da.store(self.sources, self.targets, lock=GLOBAL_LOCK)
180 else:
181 da.store(self.sources, self.targets)
~/conda/envs/xarray-py36/lib/python3.6/site-packages/dask/array/core.py in store(sources, targets, lock, regions, compute, **kwargs)
865 dsk = sharedict.merge((name, updates), *[src.dask for src in sources])
866 if compute:
--> 867 Array._get(dsk, keys, **kwargs)
868 else:
869 from ..delayed import Delayed
~/conda/envs/xarray-py36/lib/python3.6/site-packages/dask/base.py in _get(cls, dsk, keys, get, **kwargs)
104 get = get or _globals['get'] or cls._default_get
105 dsk2 = optimization_function(cls)(ensure_dict(dsk), keys, **kwargs)
--> 106 return get(dsk2, keys, **kwargs)
107
108 @classmethod
~/conda/envs/xarray-py36/lib/python3.6/site-packages/distributed/client.py in get(self, dsk, keys, restrictions, loose_restrictions, resources, sync, asynchronous, **kwargs)
1917 futures = self._graph_to_futures(dsk, set(flatten([keys])),
1918 restrictions, loose_restrictions,
-> 1919 resources=resources)
1920 packed = pack_data(keys, futures)
1921 if sync:
~/conda/envs/xarray-py36/lib/python3.6/site-packages/distributed/client.py in _graph_to_futures(self, dsk, keys, restrictions, loose_restrictions, allow_other_workers, priority, resources)
1880
1881 self._send_to_scheduler({'op': 'update-graph',
-> 1882 'tasks': valmap(dumps_task, dsk3),
1883 'dependencies': valmap(list, dependencies),
1884 'keys': list(flatkeys),
~/conda/envs/xarray-py36/lib/python3.6/site-packages/toolz/dicttoolz.py in valmap(func, d, factory)
82 """
83 rv = factory()
---> 84 rv.update(zip(iterkeys(d), map(func, itervalues(d))))
85 return rv
86
~/conda/envs/xarray-py36/lib/python3.6/site-packages/distributed/worker.py in dumps_task(task)
681 elif not any(map(_maybe_complex, task[1:])):
682 return {'function': dumps_function(task[0]),
--> 683 'args': pickle.dumps(task[1:])}
684 return to_serialize(task)
685
~/conda/envs/xarray-py36/lib/python3.6/site-packages/distributed/protocol/pickle.py in dumps(x)
49 except Exception:
50 try:
---> 51 return cloudpickle.dumps(x, protocol=pickle.HIGHEST_PROTOCOL)
52 except Exception as e:
53 logger.info("Failed to serialize %s. Exception: %s", x, e)
~/conda/envs/xarray-py36/lib/python3.6/site-packages/cloudpickle/cloudpickle.py in dumps(obj, protocol)
898
899 cp = CloudPickler(file,protocol)
--> 900 cp.dump(obj)
901
902 return file.getvalue()
~/conda/envs/xarray-py36/lib/python3.6/site-packages/cloudpickle/cloudpickle.py in dump(self, obj)
232 self.inject_addons()
233 try:
--> 234 return Pickler.dump(self, obj)
235 except RuntimeError as e:
236 if 'recursion' in e.args[0]:
~/conda/envs/xarray-py36/lib/python3.6/pickle.py in dump(self, obj)
407 if self.proto >= 4:
408 self.framer.start_framing()
--> 409 self.save(obj)
410 self.write(STOP)
411 self.framer.end_framing()
~/conda/envs/xarray-py36/lib/python3.6/pickle.py in save(self, obj, save_persistent_id)
474 f = self.dispatch.get(t)
475 if f is not None:
--> 476 f(self, obj) # Call unbound method with explicit self
477 return
478
~/conda/envs/xarray-py36/lib/python3.6/pickle.py in save_tuple(self, obj)
749 write(MARK)
750 for element in obj:
--> 751 save(element)
752
753 if id(obj) in memo:
~/conda/envs/xarray-py36/lib/python3.6/pickle.py in save(self, obj, save_persistent_id)
494 reduce = getattr(obj, "__reduce_ex__", None)
495 if reduce is not None:
--> 496 rv = reduce(self.proto)
497 else:
498 reduce = getattr(obj, "__reduce__", None)
TypeError: can't pickle netCDF4._netCDF4.Variable objects
# Minimal example reproducing the traceback above: writing a dask-backed
# xarray.Dataset to netCDF while a distributed Client is active.
from distributed import Client
import xarray

client = Client(processes=False)
ds = xarray.Dataset({"foo": ('x', [1, 2, 3])}).chunk()
ds.to_netcdf('test.nc')  # TypeError: can't pickle netCDF4._netCDF4.Variable objects
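
The error occurs because distributed's dumps_task pickles the task graph that da.store builds, and that graph holds the netCDF4._netCDF4.Variable objects used as write targets, which do not support pickling. A minimal workaround sketch, assuming the dataset fits in memory: load the dask-backed variables into numpy arrays before writing, so to_netcdf never hands a graph containing netCDF4 objects to the distributed scheduler.

from distributed import Client
import xarray

client = Client(processes=False)
ds = xarray.Dataset({"foo": ('x', [1, 2, 3])}).chunk()

# Materialize the chunked variables in memory before writing; the write
# then happens eagerly in this process and nothing netCDF4-backed is
# ever pickled for the scheduler.
ds.load().to_netcdf('test.nc')

An alternative, assuming dask's old-style options API of this era, would be to route the write through the local threaded scheduler (e.g. dask.set_options(get=dask.threaded.get)), so the graph is never serialized at all.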