module 'thinc_gpu_ops' has no attribute 'hash'
See original GitHub issue.

On Win10, with thinc 6.12.0, spacy 2.0.16, cupy 4.1.0:
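The versions above can be confirmed with a quick check (a minimal sketch; assumes the packages import cleanly):

```python
# Print the installed versions to confirm the environment (Win10 here).
import spacy
import thinc
import cupy

print("spacy:", spacy.__version__)   # expected 2.0.16
print("thinc:", thinc.__version__)   # expected 6.12.0
print("cupy:",  cupy.__version__)    # expected 4.1.0
```

The full traceback: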
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-5-2432a7701a48> in <module>()
1 # defining doc
----> 2 doc = nlp("Jill laughed at John Johnson.")
3 spacy.displacy.render(doc, style='dep', options={'distance' : 140}, jupyter=True)
E:\Anaconda3python\lib\site-packages\spacy\language.py in __call__(self, text, disable)
344 if not hasattr(proc, '__call__'):
345 raise ValueError(Errors.E003.format(component=type(proc), name=name))
--> 346 doc = proc(doc)
347 if doc is None:
348 raise ValueError(Errors.E005.format(name=name))
pipeline.pyx in spacy.pipeline.Tagger.__call__()
pipeline.pyx in spacy.pipeline.Tagger.predict()
E:\Anaconda3python\lib\site-packages\thinc\neural\_classes\model.py in __call__(self, x)
159 Must match expected shape
160 '''
--> 161 return self.predict(x)
162
163 def pipe(self, stream, batch_size=128):
E:\Anaconda3python\lib\site-packages\thinc\api.py in predict(self, X)
53 def predict(self, X):
54 for layer in self._layers:
---> 55 X = layer(X)
56 return X
57
E:\Anaconda3python\lib\site-packages\thinc\neural\_classes\model.py in __call__(self, x)
159 Must match expected shape
160 '''
--> 161 return self.predict(x)
162
163 def pipe(self, stream, batch_size=128):
E:\Anaconda3python\lib\site-packages\thinc\api.py in predict(seqs_in)
291 def predict(seqs_in):
292 lengths = layer.ops.asarray([len(seq) for seq in seqs_in])
--> 293 X = layer(layer.ops.flatten(seqs_in, pad=pad))
294 return layer.ops.unflatten(X, lengths, pad=pad)
295
E:\Anaconda3python\lib\site-packages\thinc\neural\_classes\model.py in __call__(self, x)
159 Must match expected shape
160 '''
--> 161 return self.predict(x)
162
163 def pipe(self, stream, batch_size=128):
E:\Anaconda3python\lib\site-packages\thinc\api.py in predict(self, X)
53 def predict(self, X):
54 for layer in self._layers:
---> 55 X = layer(X)
56 return X
57
E:\Anaconda3python\lib\site-packages\thinc\neural\_classes\model.py in __call__(self, x)
159 Must match expected shape
160 '''
--> 161 return self.predict(x)
162
163 def pipe(self, stream, batch_size=128):
E:\Anaconda3python\lib\site-packages\thinc\neural\_classes\model.py in predict(self, X)
123
124 def predict(self, X):
--> 125 y, _ = self.begin_update(X)
126 return y
127
E:\Anaconda3python\lib\site-packages\thinc\api.py in uniqued_fwd(X, drop)
372 return_counts=True)
373 X_uniq = layer.ops.xp.ascontiguousarray(X[ind])
--> 374 Y_uniq, bp_Y_uniq = layer.begin_update(X_uniq, drop=drop)
375 Y = Y_uniq[inv].reshape((X.shape[0],) + Y_uniq.shape[1:])
376 def uniqued_bwd(dY, sgd=None):
E:\Anaconda3python\lib\site-packages\thinc\api.py in begin_update(self, X, drop)
59 callbacks = []
60 for layer in self._layers:
---> 61 X, inc_layer_grad = layer.begin_update(X, drop=drop)
62 callbacks.append(inc_layer_grad)
63 def continue_update(gradient, sgd=None):
E:\Anaconda3python\lib\site-packages\thinc\api.py in begin_update(X, *a, **k)
174 def begin_update(X, *a, **k):
175 forward, backward = split_backward(layers)
--> 176 values = [fwd(X, *a, **k) for fwd in forward]
177
178 output = ops.xp.hstack(values)
E:\Anaconda3python\lib\site-packages\thinc\api.py in <listcomp>(.0)
174 def begin_update(X, *a, **k):
175 forward, backward = split_backward(layers)
--> 176 values = [fwd(X, *a, **k) for fwd in forward]
177
178 output = ops.xp.hstack(values)
E:\Anaconda3python\lib\site-packages\thinc\api.py in wrap(*args, **kwargs)
256 '''
257 def wrap(*args, **kwargs):
--> 258 output = func(*args, **kwargs)
259 if splitter is None:
260 to_keep, to_sink = output
E:\Anaconda3python\lib\site-packages\thinc\api.py in begin_update(X, *a, **k)
174 def begin_update(X, *a, **k):
175 forward, backward = split_backward(layers)
--> 176 values = [fwd(X, *a, **k) for fwd in forward]
177
178 output = ops.xp.hstack(values)
E:\Anaconda3python\lib\site-packages\thinc\api.py in <listcomp>(.0)
174 def begin_update(X, *a, **k):
175 forward, backward = split_backward(layers)
--> 176 values = [fwd(X, *a, **k) for fwd in forward]
177
178 output = ops.xp.hstack(values)
E:\Anaconda3python\lib\site-packages\thinc\api.py in wrap(*args, **kwargs)
256 '''
257 def wrap(*args, **kwargs):
--> 258 output = func(*args, **kwargs)
259 if splitter is None:
260 to_keep, to_sink = output
E:\Anaconda3python\lib\site-packages\thinc\api.py in begin_update(X, *a, **k)
174 def begin_update(X, *a, **k):
175 forward, backward = split_backward(layers)
--> 176 values = [fwd(X, *a, **k) for fwd in forward]
177
178 output = ops.xp.hstack(values)
E:\Anaconda3python\lib\site-packages\thinc\api.py in <listcomp>(.0)
174 def begin_update(X, *a, **k):
175 forward, backward = split_backward(layers)
--> 176 values = [fwd(X, *a, **k) for fwd in forward]
177
178 output = ops.xp.hstack(values)
E:\Anaconda3python\lib\site-packages\thinc\api.py in wrap(*args, **kwargs)
256 '''
257 def wrap(*args, **kwargs):
--> 258 output = func(*args, **kwargs)
259 if splitter is None:
260 to_keep, to_sink = output
E:\Anaconda3python\lib\site-packages\thinc\api.py in begin_update(X, *a, **k)
174 def begin_update(X, *a, **k):
175 forward, backward = split_backward(layers)
--> 176 values = [fwd(X, *a, **k) for fwd in forward]
177
178 output = ops.xp.hstack(values)
E:\Anaconda3python\lib\site-packages\thinc\api.py in <listcomp>(.0)
174 def begin_update(X, *a, **k):
175 forward, backward = split_backward(layers)
--> 176 values = [fwd(X, *a, **k) for fwd in forward]
177
178 output = ops.xp.hstack(values)
E:\Anaconda3python\lib\site-packages\thinc\api.py in wrap(*args, **kwargs)
256 '''
257 def wrap(*args, **kwargs):
--> 258 output = func(*args, **kwargs)
259 if splitter is None:
260 to_keep, to_sink = output
E:\Anaconda3python\lib\site-packages\thinc\neural\_classes\hash_embed.py in begin_update(self, ids, drop)
49 if ids.ndim >= 2:
50 ids = self.ops.xp.ascontiguousarray(ids[:, self.column], dtype='uint64')
---> 51 keys = self.ops.hash(ids, self.seed) % self.nV
52 vectors = self.vectors[keys].sum(axis=1)
53 mask = self.ops.get_dropout_mask((vectors.shape[1],), drop)
ops.pyx in thinc.neural.ops.CupyOps.hash()
AttributeError: module 'thinc_gpu_ops' has no attribute 'hash'
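The failing call is `CupyOps.hash()`, which delegates to the compiled `thinc_gpu_ops` extension; the AttributeError suggests the package imported, but without the compiled CUDA kernels that provide `hash`. A minimal diagnostic along these lines (a sketch, not an official tool) can help narrow it down:

```python
# Check whether the GPU helper package actually exposes the compiled kernels.
try:
    import thinc_gpu_ops
    print("thinc_gpu_ops imported from:", thinc_gpu_ops.__file__)
    print("has 'hash':", hasattr(thinc_gpu_ops, "hash"))
except ImportError as exc:
    print("thinc_gpu_ops is not installed:", exc)

# Check that CuPy itself can reach the GPU (rules out a broken CUDA setup).
try:
    import cupy
    cupy.zeros(1)  # allocates on the device; fails if the CUDA runtime is unusable
    print("cupy OK, CUDA runtime reachable")
except Exception as exc:
    print("cupy/CUDA problem:", exc)
```

If `has 'hash'` prints `False` while the CuPy check passes, the compiled extension is likely missing rather than CUDA itself being broken.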
Issue Analytics: created 5 years ago · Reactions: 4 · Comments: 32 (6 by maintainers)
I have the same error on Ubuntu with the latest pip version of spaCy. There are some references on how to solve it, but none of them fixes my case. Other GPU-dependent libraries (e.g. PyTorch, TensorFlow, CatBoost) work fine.
Could you please create a README with up-to-date recommendations on how to enable GPU training? Right now it is quite difficult to get spaCy working on the GPU.
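Until such a document exists, here is a rough sketch of the Python-side steps (hedged, not an official recipe; it assumes `spacy.require_gpu()` is available in the installed 2.0.x release and that the `en_core_web_sm` model is installed, since the original report does not say which model was loaded):

```python
# Rough sketch of enabling the GPU in spaCy 2.0.x (assumptions noted above).
import spacy

# require_gpu() raises if CuPy/CUDA are unavailable, so failures surface early;
# prefer_gpu() would silently fall back to the CPU instead.
spacy.require_gpu()

nlp = spacy.load("en_core_web_sm")          # hypothetical model choice
doc = nlp("Jill laughed at John Johnson.")  # the call that fails in the traceback above
print([(t.text, t.tag_) for t in doc])
```

If this still raises the AttributeError, the likely culprit is `thinc_gpu_ops` having been installed without its compiled extension; reinstalling it from source with the CUDA toolkit and a matching compiler available is a common suggestion in similar reports.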
Has anyone resolved this? I first filed this issue in October, but this thread is more active.