ValueError: [E1005] Unable to set attribute 'POS' in tokenizer exception for ' '. Tokenizer exceptions are only allowed to specify ORTH and NORM.
Traceback:
File "D:\pro\AI-Resume-Analyzer-main\venvapp\Lib\site-packages\streamlit\runtime\scriptrunner\script_runner.py", line 535, in run_script
exec(code, module.__dict__)
File "D:\pro\AI-Resume-Analyzer-main\App\App.py", line 804, in <module>
run()
File "D:\pro\AI-Resume-Analyzer-main\App\App.py", line 269, in run
resume_data = ResumeParser(save_image_path).get_extracted_data()
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pro\AI-Resume-Analyzer-main\venvapp\Lib\site-packages\pyresparser\resume_parser.py", line 21, in __init__
custom_nlp = spacy.load(os.path.dirname(os.path.abspath(__file__)))
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pro\AI-Resume-Analyzer-main\venvapp\Lib\site-packages\spacy\__init__.py", line 51, in load
return util.load_model(
^^^^^^^^^^^^^^^^
File "D:\pro\AI-Resume-Analyzer-main\venvapp\Lib\site-packages\spacy\util.py", line 467, in load_model
return load_model_from_path(Path(name), **kwargs) # type: ignore[arg-type]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pro\AI-Resume-Analyzer-main\venvapp\Lib\site-packages\spacy\util.py", line 547, in load_model_from_path
return nlp.from_disk(model_path, exclude=exclude, overrides=overrides)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pro\AI-Resume-Analyzer-main\venvapp\Lib\site-packages\spacy\language.py", line 2184, in from_disk
util.from_disk(path, deserializers, exclude) # type: ignore[arg-type]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\pro\AI-Resume-Analyzer-main\venvapp\Lib\site-packages\spacy\util.py", line 1372, in from_disk
reader(path / key)
File "D:\pro\AI-Resume-Analyzer-main\venvapp\Lib\site-packages\spacy\language.py", line 2170, in <lambda>
deserializers["tokenizer"] = lambda p: self.tokenizer.from_disk( # type: ignore[union-attr]
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "spacy\tokenizer.pyx", line 771, in spacy.tokenizer.Tokenizer.from_disk
File "spacy\tokenizer.pyx", line 839, in spacy.tokenizer.Tokenizer.from_bytes
File "spacy\tokenizer.pyx", line 123, in spacy.tokenizer.Tokenizer.rules.__set__
File "spacy\tokenizer.pyx", line 571, in spacy.tokenizer.Tokenizer._load_special_cases
File "spacy\tokenizer.pyx", line 601, in spacy.tokenizer.Tokenizer.add_special_case
File "spacy\tokenizer.pyx", line 589, in spacy.tokenizer.Tokenizer._validate_sp