我正在嘗試與dask開始。在下面的玩具示例中,我有三列,分別是site
,counts
和readings
。 site
和counts
是標量列,而readings
包含三維數組。如何使用Dask在此「嵌套」結構化數組上運行計算?
我可以在counts
上運行計算,但如果我嘗試在readings
上運行它們,我會得到一個異常。我在這裏正確使用dask?
import dask.array as da
import numpy as np
import tables
dtype = np.dtype([
('site', 'S1'),
('counts', np.int8),
('readings', np.float64, (2, 2, 3))
])
with tables.open_file('test.hdf5', 'w') as f:
sensors = f.create_table('/', 'sensors', description=dtype)
rows = [(site, count, np.random.rand(2, 2, 3))
for count, site in enumerate('abcdefghij')]
sensors.append(rows)
sensors.flush()
# Operating on 'counts' column works fine...
x = da.from_array(f.root.sensors.cols.counts, chunks=5)
x_add = (x + 1).compute()
# But on 'readings' does not
y = da.from_array(f.root.sensors.cols.readings, chunks=5)
y_add = (y + 1).compute()
在(y + 1).compute()
,我得到以下例外。 (底部的實際的錯誤似乎是從建築的錯誤字符串在pytables一個類型錯誤,所以它不是非常有幫助。)
TypeError Traceback (most recent call last)
<ipython-input-115-77c7e132695c> in <module>()
22 # But on readings column does not
23 y = da.from_array(f.root.sensors.cols.readings, chunks=5)
---> 24 y_add = (y + 1).compute()
~/miniconda/lib/python3.6/site-packages/dask/base.py in compute(self, **kwargs)
95 Extra keywords to forward to the scheduler ``get`` function.
96 """
---> 97 (result,) = compute(self, traverse=False, **kwargs)
98 return result
99
~/miniconda/lib/python3.6/site-packages/dask/base.py in compute(*args, **kwargs)
202 dsk = collections_to_dsk(variables, optimize_graph, **kwargs)
203 keys = [var._keys() for var in variables]
--> 204 results = get(dsk, keys, **kwargs)
205
206 results_iter = iter(results)
~/miniconda/lib/python3.6/site-packages/dask/threaded.py in get(dsk, result, cache, num_workers, **kwargs)
73 results = get_async(pool.apply_async, len(pool._pool), dsk, result,
74 cache=cache, get_id=_thread_get_id,
---> 75 pack_exception=pack_exception, **kwargs)
76
77 # Cleanup pools associated to dead threads
~/miniconda/lib/python3.6/site-packages/dask/local.py in get_async(apply_async, num_workers, dsk, result, cache, get_id, rerun_exceptions_locally, pack_exception, raise_exception, callbacks, dumps, loads, **kwargs)
519 _execute_task(task, data) # Re-execute locally
520 else:
--> 521 raise_exception(exc, tb)
522 res, worker_id = loads(res_info)
523 state['cache'][key] = res
~/miniconda/lib/python3.6/site-packages/dask/compatibility.py in reraise(exc, tb)
58 if exc.__traceback__ is not tb:
59 raise exc.with_traceback(tb)
---> 60 raise exc
61
62 else:
~/miniconda/lib/python3.6/site-packages/dask/local.py in execute_task(key, task_info, dumps, loads, get_id, pack_exception)
288 try:
289 task, data = loads(task_info)
--> 290 result = _execute_task(task, data)
291 id = get_id()
292 result = dumps((result, id))
~/miniconda/lib/python3.6/site-packages/dask/local.py in _execute_task(arg, cache, dsk)
269 func, args = arg[0], arg[1:]
270 args2 = [_execute_task(a, cache) for a in args]
--> 271 return func(*args2)
272 elif not ishashable(arg):
273 return arg
~/miniconda/lib/python3.6/site-packages/dask/array/core.py in getarray(a, b, lock)
61 lock.acquire()
62 try:
---> 63 c = a[b]
64 if type(c) != np.ndarray:
65 c = np.asarray(c)
~/miniconda/lib/python3.6/site-packages/tables/table.py in __getitem__(self, key)
3455 else:
3456 raise TypeError(
-> 3457 "'%s' key type is not valid in this context" % key)
3458
3459 def __iter__(self):
TypeError: not all arguments converted during string formatting
最後的x_add
值是array([ 6, 7, 8, 9, 10, 6, 7, 8, 9, 10], dtype=int8)
,這是+1最後一塊平鋪兩次。我期望[1, 2, ..., 10]
。再一次,讓我懷疑我是否按照預期使用了dask。
爲什麼你得到相同的數據兩次,我的猜測是,這些表共享線程之間的狀態,所以所有工作人員看到相同的最後數據。如果使用分佈式調度程序,這會消失。 – mdurant