I have a huge number of sentences (about 7 million) from which I want to extract the nouns. I use the joblib library to parallelize the extraction process, like this:
import spacy
from tqdm import tqdm
from joblib import Parallel, delayed

nlp = spacy.load('en_core_web_sm')

class nouns:

    def get_nouns(self, text):
        # Parse the sentence and keep only noun tokens (common/proper, singular/plural).
        doc = nlp(u"{}".format(text))
        return [token.text for token in doc if token.tag_ in ['NN', 'NNP', 'NNS', 'NNPS']]

    def parallelize(self, sentences):
        # Runs fine with n_jobs=1; raising n_jobs above 1 produces the error below.
        results = Parallel(n_jobs=1)(delayed(self.get_nouns)(sent) for sent in tqdm(sentences))
        return results

if __name__ == '__main__':
    sentences = ['we went to the school yesterday',
                 'The weather is really cold',
                 'Can we catch the dog?',
                 'How old are you John?',
                 'I like diving and swimming',
                 'Can the world become united?']

    obj = nouns()
    print(obj.parallelize(sentences))
When n_jobs in the parallelize function is greater than 1, I get this long error:
100%|██████████| 6/6 [00:00<00:00, 200.00it/s]
joblib.externals.loky.process_executor._RemoteTraceback:
"""
Traceback (most recent call last):
File "C:\Python35\lib\site-packages\joblib\externals\loky\backend\queues.py", line 150, in _feed
obj_ = dumps(obj, reducers=reducers)
File "C:\Python35\lib\site-packages\joblib\externals\loky\backend\reduction.py", line 243, in dumps
dump(obj, buf, reducers=reducers, protocol=protocol)
File "C:\Python35\lib\site-packages\joblib\externals\loky\backend\reduction.py", line 236, in dump
_LokyPickler(file, reducers=reducers, protocol=protocol).dump(obj)
File "C:\Python35\lib\site-packages\joblib\externals\cloudpickle\cloudpickle.py", line 267, in dump
return Pickler.dump(self, obj)
File "C:\Python35\lib\pickle.py", line 408, in dump
self.save(obj)
File "C:\Python35\lib\pickle.py", line 520, in save
self.save_reduce(obj=obj, *rv)
File "C:\Python35\lib\pickle.py", line 623, in save_reduce
save(state)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 810, in save_dict
self._batch_setitems(obj.items())
File "C:\Python35\lib\pickle.py", line 836, in _batch_setitems
save(v)
File "C:\Python35\lib\pickle.py", line 520, in save
self.save_reduce(obj=obj, *rv)
File "C:\Python35\lib\pickle.py", line 623, in save_reduce
save(state)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 810, in save_dict
self._batch_setitems(obj.items())
File "C:\Python35\lib\pickle.py", line 841, in _batch_setitems
save(v)
File "C:\Python35\lib\pickle.py", line 520, in save
self.save_reduce(obj=obj, *rv)
File "C:\Python35\lib\pickle.py", line 623, in save_reduce
save(state)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 810, in save_dict
self._batch_setitems(obj.items())
File "C:\Python35\lib\pickle.py", line 836, in _batch_setitems
save(v)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 770, in save_list
self._batch_appends(obj)
File "C:\Python35\lib\pickle.py", line 797, in _batch_appends
save(tmp[0])
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 725, in save_tuple
save(element)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\site-packages\joblib\externals\cloudpickle\cloudpickle.py", line 718, in save_instancemethod
self.save_reduce(types.MethodType, (obj.__func__, obj.__self__), obj=obj)
File "C:\Python35\lib\pickle.py", line 599, in save_reduce
save(args)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 725, in save_tuple
save(element)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\site-packages\joblib\externals\cloudpickle\cloudpickle.py", line 395, in save_function
self.save_function_tuple(obj)
File "C:\Python35\lib\site-packages\joblib\externals\cloudpickle\cloudpickle.py", line 594, in save_function_tuple
save(state)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 810, in save_dict
self._batch_setitems(obj.items())
File "C:\Python35\lib\pickle.py", line 836, in _batch_setitems
save(v)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 810, in save_dict
self._batch_setitems(obj.items())
File "C:\Python35\lib\pickle.py", line 841, in _batch_setitems
save(v)
File "C:\Python35\lib\pickle.py", line 520, in save
self.save_reduce(obj=obj, *rv)
File "C:\Python35\lib\pickle.py", line 623, in save_reduce
save(state)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 810, in save_dict
self._batch_setitems(obj.items())
File "C:\Python35\lib\pickle.py", line 836, in _batch_setitems
save(v)
File "C:\Python35\lib\pickle.py", line 520, in save
self.save_reduce(obj=obj, *rv)
File "C:\Python35\lib\pickle.py", line 599, in save_reduce
save(args)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 740, in save_tuple
save(element)
File "C:\Python35\lib\pickle.py", line 520, in save
self.save_reduce(obj=obj, *rv)
File "C:\Python35\lib\pickle.py", line 623, in save_reduce
save(state)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 740, in save_tuple
save(element)
File "C:\Python35\lib\pickle.py", line 495, in save
rv = reduce(self.proto)
File "stringsource", line 2, in preshed.maps.PreshMap.__reduce_cython__
TypeError: self.c_map cannot be converted to a Python object for pickling
"""Exception in thread QueueFeederThread:
Traceback (most recent call last):
File "C:\Python35\lib\site-packages\joblib\externals\loky\backend\queues.py", line 150, in _feed
obj_ = dumps(obj, reducers=reducers)
File "C:\Python35\lib\site-packages\joblib\externals\loky\backend\reduction.py", line 243, in dumps
dump(obj, buf, reducers=reducers, protocol=protocol)
File "C:\Python35\lib\site-packages\joblib\externals\loky\backend\reduction.py", line 236, in dump
_LokyPickler(file, reducers=reducers, protocol=protocol).dump(obj)
File "C:\Python35\lib\site-packages\joblib\externals\cloudpickle\cloudpickle.py", line 267, in dump
return Pickler.dump(self, obj)
File "C:\Python35\lib\pickle.py", line 408, in dump
self.save(obj)
File "C:\Python35\lib\pickle.py", line 520, in save
self.save_reduce(obj=obj, *rv)
File "C:\Python35\lib\pickle.py", line 623, in save_reduce
save(state)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 810, in save_dict
self._batch_setitems(obj.items())
File "C:\Python35\lib\pickle.py", line 836, in _batch_setitems
save(v)
File "C:\Python35\lib\pickle.py", line 520, in save
self.save_reduce(obj=obj, *rv)
File "C:\Python35\lib\pickle.py", line 623, in save_reduce
save(state)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 810, in save_dict
self._batch_setitems(obj.items())
File "C:\Python35\lib\pickle.py", line 841, in _batch_setitems
save(v)
File "C:\Python35\lib\pickle.py", line 520, in save
self.save_reduce(obj=obj, *rv)
File "C:\Python35\lib\pickle.py", line 623, in save_reduce
save(state)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 810, in save_dict
self._batch_setitems(obj.items())
File "C:\Python35\lib\pickle.py", line 836, in _batch_setitems
save(v)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 770, in save_list
self._batch_appends(obj)
File "C:\Python35\lib\pickle.py", line 797, in _batch_appends
save(tmp[0])
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 725, in save_tuple
save(element)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\site-packages\joblib\externals\cloudpickle\cloudpickle.py", line 718, in save_instancemethod
self.save_reduce(types.MethodType, (obj.__func__, obj.__self__), obj=obj)
File "C:\Python35\lib\pickle.py", line 599, in save_reduce
save(args)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 725, in save_tuple
save(element)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\site-packages\joblib\externals\cloudpickle\cloudpickle.py", line 395, in save_function
self.save_function_tuple(obj)
File "C:\Python35\lib\site-packages\joblib\externals\cloudpickle\cloudpickle.py", line 594, in save_function_tuple
save(state)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 810, in save_dict
self._batch_setitems(obj.items())
File "C:\Python35\lib\pickle.py", line 836, in _batch_setitems
save(v)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 810, in save_dict
self._batch_setitems(obj.items())
File "C:\Python35\lib\pickle.py", line 841, in _batch_setitems
save(v)
File "C:\Python35\lib\pickle.py", line 520, in save
self.save_reduce(obj=obj, *rv)
File "C:\Python35\lib\pickle.py", line 623, in save_reduce
save(state)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 810, in save_dict
self._batch_setitems(obj.items())
File "C:\Python35\lib\pickle.py", line 836, in _batch_setitems
save(v)
File "C:\Python35\lib\pickle.py", line 520, in save
self.save_reduce(obj=obj, *rv)
File "C:\Python35\lib\pickle.py", line 599, in save_reduce
save(args)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 740, in save_tuple
save(element)
File "C:\Python35\lib\pickle.py", line 520, in save
self.save_reduce(obj=obj, *rv)
File "C:\Python35\lib\pickle.py", line 623, in save_reduce
save(state)
File "C:\Python35\lib\pickle.py", line 475, in save
f(self, obj) # Call unbound method with explicit self
File "C:\Python35\lib\pickle.py", line 740, in save_tuple
save(element)
File "C:\Python35\lib\pickle.py", line 495, in save
rv = reduce(self.proto)
File "stringsource", line 2, in preshed.maps.PreshMap.__reduce_cython__
TypeError: self.c_map cannot be converted to a Python object for pickling
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "C:\Python35\lib\threading.py", line 914, in _bootstrap_inner
self.run()
File "C:\Python35\lib\threading.py", line 862, in run
self._target(*self._args, **self._kwargs)
File "C:\Python35\lib\site-packages\joblib\externals\loky\backend\queues.py", line 175, in _feed
onerror(e, obj)
File "C:\Python35\lib\site-packages\joblib\externals\loky\process_executor.py", line 310, in _on_queue_feeder_error
self.thread_wakeup.wakeup()
File "C:\Python35\lib\site-packages\joblib\externals\loky\process_executor.py", line 155, in wakeup
self._writer.send_bytes(b"")
File "C:\Python35\lib\multiprocessing\connection.py", line 183, in send_bytes
self._check_closed()
File "C:\Python35\lib\multiprocessing\connection.py", line 136, in _check_closed
raise OSError("handle is closed")
OSError: handle is closed
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File ".../playground.py", line 43, in <module>
print(obj.Paralize(sentences))
File ".../playground.py", line 32, in Paralize
results = Parallel(n_jobs=2)(delayed(self.get_nouns)(sent) for sent in tqdm(sentences))
File "C:\Python35\lib\site-packages\joblib\parallel.py", line 934, in __call__
self.retrieve()
File "C:\Python35\lib\site-packages\joblib\parallel.py", line 833, in retrieve
self._output.extend(job.get(timeout=self.timeout))
File "C:\Python35\lib\site-packages\joblib_parallel_backends.py", line 521, in wrap_future_result
return future.result(timeout=timeout)
File "C:\Python35\lib\concurrent\futures_base.py", line 405, in result
return self.__get_result()
File "C:\Python35\lib\concurrent\futures_base.py", line 357, in __get_result
raise self._exception
_pickle.PicklingError: Could not pickle the task to send it to the workers.
What is wrong with my code?
Originally posted by Minions; translated under the CC BY-SA 4.0 license.
Well, the problem most probably does not come from the code itself, but from the "hidden" processing that kicks in once n_jobs directs (and joblib internally orchestrates) the preparation of that many exact copies of the main process, so that they can work independently of one another (thus effectively escaping the GIL lock and mapping multiple process flows onto physical hardware resources). This step is responsible for making copies of all Python objects and is known to use Pickle to do so. The Pickle module has long been known for its principal limitations on what can and cannot be pickled. The error message confirms this: TypeError: self.c_map cannot be converted to a Python object for pickling
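To see that it is indeed the serialization step that breaks, and not the noun extraction itself, one can try to pickle the loaded spaCy pipeline directly. This is only a sketch under the assumption that the nlp object is the part of the delayed call that cannot be serialized; on newer spaCy/preshed versions the call may well succeed.

import pickle
import spacy

nlp = spacy.load('en_core_web_sm')

try:
    # On the spaCy/preshed versions shown in the traceback this raises the same TypeError.
    pickle.dumps(nlp)
    print('nlp pickled fine - the problem lies elsewhere')
except TypeError as err:
    print('nlp is not picklable:', err)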
One may try a trick: supply Mike McKerns' dill module instead of Pickle and test whether your "problematic" Python objects get pickled by this module without throwing the error. dill has the same API signatures, so a plain import dill as pickle may help while keeping all the other code unchanged. I ran into the same problem, with large models that had to be distributed to and collected back from multiple processes, and dill was a way to go. Performance also improved.
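A minimal sketch of that test, again assuming that the loaded spaCy pipeline is the problematic object; dill.dumps and dill.loads mirror the standard pickle calls:

import dill
import spacy

nlp = spacy.load('en_core_web_sm')

payload = dill.dumps(nlp)      # may still fail on some spaCy/preshed versions
restored = dill.loads(payload)
print(type(restored))          # the pipeline class, if the round-trip worked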
A cool side effect of discovering dill: once import dill as pickle is done, pickle.dump_session( <aFile> ) will save a complete copy of the Python interpreter's session state. This can be restored when needed (post-crash recovery; full save/restore of a trained and optimized ML model's state; full save and redistribution of an incrementally learned ML model's state for remote restores across a deployed user base; etc.).
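A short sketch of that side effect, using dill's dump_session / load_session; the file name is only an example:

import dill

counter = 42
trained_state = {'weights': [0.1, 0.2, 0.3]}

dill.dump_session('session.pkl')    # snapshot the whole interpreter session to disk

# later, e.g. in a fresh interpreter or after a crash:
dill.load_session('session.pkl')    # counter and trained_state are defined again
print(counter, trained_state)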