代码:
from __future__ import print_function
import logging
import os.path
import six
import sys
from gensim.corpora import WikiCorpus
if __name__ == '__main__':
    # Convert a Wikipedia XML dump (.xml.bz2) into a plain-text corpus,
    # one space-joined article per line. Works on both Py2 and Py3 via six.
    program = os.path.basename(sys.argv[0])
    logger = logging.getLogger(program)
    logging.basicConfig(format='%(asctime)s: %(levelname)s: %(message)s')
    logging.root.setLevel(level=logging.INFO)
    # lazy %-style args instead of eager string formatting
    logger.info("running %s", ' '.join(sys.argv))

    # check and process input arguments
    if len(sys.argv) != 3:
        print("Using: python process_wiki.py enwiki.xxx.xml.bz2 wiki.en.text")
        sys.exit(1)
    inp, outp = sys.argv[1:3]
    space = b" "  # byte separator for the Py2 branch, where tokens are bytes
    i = 0

    # NOTE(review): the "OSError: Invalid data stream" in the attached
    # traceback is raised by the bz2 decompressor while *reading* the dump,
    # i.e. the .xml.bz2 file itself is truncated or corrupt. Re-download the
    # dump and verify its size/checksum -- this is not a bug in the loop below.
    wiki = WikiCorpus(inp, lemmatize=False, dictionary={})

    # 'with' guarantees the output file is closed even if get_texts() raises
    # mid-iteration (the original leaked the handle on error).
    with open(outp, 'w') as output:
        for text in wiki.get_texts():
            if six.PY3:
                # Tokens are already str on Py3; the original's
                # str -> bytes -> str round-trip was a no-op.
                output.write(' '.join(text) + '\n')
            else:
                # On Py2 tokens are bytes, so join with a byte-space.
                output.write(space.join(text) + "\n")
            i = i + 1
            if i % 10000 == 0:
                logger.info("Saved %d articles", i)
    logger.info("Finished Saved %d articles", i)
报错信息
Traceback (most recent call last):
File "C:/Users/root/PycharmProjects/AI/super/ALP/process.py", line 28, in <module>
for text in wiki.get_texts():
File "C:\soft\anaconda\lib\site-packages\gensim\corpora\wikicorpus.py", line 369, in get_texts
for group in utils.chunkize(texts, chunksize=10 * self.processes, maxsize=1):
File "C:\soft\anaconda\lib\site-packages\gensim\utils.py", line 863, in chunkize
for chunk in chunkize_serial(corpus, chunksize, as_numpy=as_numpy):
File "C:\soft\anaconda\lib\site-packages\gensim\utils.py", line 816, in chunkize_serial
wrapped_chunk = [list(itertools.islice(it, int(chunksize)))]
File "C:\soft\anaconda\lib\site-packages\gensim\corpora\wikicorpus.py", line 361, in <genexpr>
((text, self.lemmatize, title, pageid, tokenization_params)
File "C:\soft\anaconda\lib\site-packages\gensim\corpora\wikicorpus.py", line 212, in extract_pages
elem = next(elems)
File "C:\soft\anaconda\lib\site-packages\gensim\corpora\wikicorpus.py", line 206, in <genexpr>
elems = (elem for _, elem in iterparse(f, events=("end",)))
File "C:\soft\anaconda\lib\xml\etree\ElementTree.py", line 1223, in iterator
data = source.read(16 * 1024)
File "C:\soft\anaconda\lib\bz2.py", line 182, in read
return self._buffer.read(size)
File "C:\soft\anaconda\lib\_compression.py", line 68, in readinto
data = self.read(len(byte_view))
File "C:\soft\anaconda\lib\_compression.py", line 103, in read
data = self._decompressor.decompress(rawblock, size)
OSError: Invalid data stream
Process finished with exit code 1
请问楼主找到解决办法了吗,我也遇到这种问题卡住了。。。