can't import OrderedDict ImportError: cannot import name 'OrderedDict' - allennlp

I'm trying to run a repo that uses allennlp. When I run it, the line `from typing import OrderedDict` in the file ddp_accelerator.py fails with an ImportError.
Traceback (most recent call last):
File "train.py", line 10, in <module>
from models.newmodel import model
File "/home/GraphWriter-master/models/newmodel.py", line 4, in <module>
from models.list_encoder import list_encode, lseq_encode
File "/home/GraphWriter-master/models/list_encoder.py", line 6, in <module>
from allennlp.modules.elmo import Elmo
File "/home/GraphWriter-master/allennlp/modules/__init__.py", line 8, in <module>
from allennlp.modules.backbones import Backbone
File "/home/GraphWriter-master/allennlp/modules/backbones/__init__.py", line 3, in <module>
from allennlp.modules.backbones.vilbert_backbone import VilbertBackbone
File "/home/GraphWriter-master/allennlp/modules/backbones/vilbert_backbone.py", line 10, in <module>
from allennlp.modules.transformer import (
File "/home/GraphWriter-master/allennlp/modules/transformer/__init__.py", line 126, in <module>
from allennlp.modules.transformer.layer_norm import LayerNorm
File "/home/GraphWriter-master/allennlp/modules/transformer/layer_norm.py", line 3, in <module>
from allennlp.modules.transformer.transformer_module import TransformerModule
File "/home/GraphWriter-master/allennlp/modules/transformer/transformer_module.py", line 12, in <module>
from allennlp.nn.parallel import ShardedModuleMixin
File "/home/GraphWriter-master/allennlp/nn/parallel/__init__.py", line 2, in <module>
from allennlp.nn.parallel.ddp_accelerator import (
File "/home/GraphWriter-master/allennlp/nn/parallel/ddp_accelerator.py", line 6, in <module>
from typing import (
ImportError: cannot import name 'OrderedDict'
I tried importing `OrderedDict` from `collections` instead, as suggested in other posts, but I still can't figure it out.
I'm using Python 3.6
Thanks for helping me.

Do you have to use Python 3.6? `typing.OrderedDict` was only added in Python 3.7.2, so Python 3.7.2 and upwards have `OrderedDict` where we expect it.

Related

Issue with KeyError: 'babel'

I am very new to Flask and everything related to Web development. I am building an app in Flask with Dash integrated and it is failing with the following error:
C:\Users\satpute\PycharmProjects\RMAPartsDepotPlanning\venv\Scripts\python.exe
C:/PycharmProjects/RMAPrototype/dashapp.py
Traceback (most recent call last):
File "C:\PycharmProjects\RMAPrototype\dashapp.py", line 4, in <module>
app = create_app()
File "C:\PycharmProjects\RMAPrototype\PDP\__init__.py", line 12, in create_app
from PDP import PDPApp
File "C:\PycharmProjects\RMAPrototype\PDP\PartsDepotPlanningApp.py", line 14, in <module>
from flask_table import Table, Col, LinkCol
File "C:\PycharmProjects\RMAPrototype\venv\lib\site-packages\flask_table\__init__.py", line 1, in
<module>
from .table import Table, create_table
File "C:\PycharmProjects\RMAPrototype\venv\lib\site-packages\flask_table\table.py", line 8, in
<module>
from .columns import Col
File "C:\PycharmProjects\RMAPrototype\venv\lib\site-packages\flask_table\columns.py", line 161, in
<module>
class BoolCol(OptCol):
File "C:\PycharmProjects\RMAPrototype\venv\lib\site-packages\flask_table\columns.py", line 166, in
BoolCol
yes_display = _('Yes')
File "C:\PycharmProjects\RMAPrototype\venv\lib\site-packages\flask_babel\__init__.py", line 548, in
gettext
t = get_translations()
File "C:\PycharmProjects\RMAPrototype\venv\lib\site-packages\flask_babel\__init__.py", line 217,
in get_translations
babel = current_app.extensions['babel']
KeyError: 'babel'
> Process finished with exit code 1
How can I go about troubleshooting this? I tried different approaches but couldn't resolve it so far.

Upload Pandas dataframe as a JSON object in Cloud Storage

I have been trying to upload a Pandas dataframe as a JSON object to Cloud Storage using a Cloud Function. Following is my code:
def upload_blob(bucket_name, source_file_name, destination_blob_name):
"""Uploads a file to the bucket."""
storage_client = storage.Client()
bucket = storage_client.get_bucket(bucket_name)
blob = bucket.blob(destination_blob_name)
blob.upload_from_file(source_file_name)
print('File {} uploaded to {}.'.format(
source_file_name,
destination_blob_name))
final_file = pd.concat([df, df_second], axis=0)
final_file.to_json('/tmp/abc.json')
with open('/tmp/abc.json', 'r') as file_obj:
upload_blob('test-bucket',file_obj,'abc.json')
I am getting the following error in line - blob.upload_from_file(source_file_name)
Deployment failure:
Function failed on loading user code. Error message: Code in file main.py
can't be loaded.
Detailed stack trace: Traceback (most recent call last):
File "/env/local/lib/python3.7/site-
packages/google/cloud/functions/worker.py", line 305, in
check_or_load_user_function
_function_handler.load_user_function()
File "/env/local/lib/python3.7/site-
packages/google/cloud/functions/worker.py", line 184, in load_user_function
spec.loader.exec_module(main)
File "<frozen importlib._bootstrap_external>", line 728, in exec_module
File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
File "/user_code/main.py", line 6, in <module>
import datalab.storage as gcs
File "/env/local/lib/python3.7/site-packages/datalab/storage/__init__.py",
line 16, in <module>
from ._bucket import Bucket, Buckets
File "/env/local/lib/python3.7/site-packages/datalab/storage/_bucket.py",
line 21, in <module>
import datalab.context
File "/env/local/lib/python3.7/site-packages/datalab/context/__init__.py",
line 15, in <module>
from ._context import Context
File "/env/local/lib/python3.7/site-packages/datalab/context/_context.py",
line 20, in <module>
from . import _project
File "/env/local/lib/python3.7/site-packages/datalab/context/_project.py",
line 18, in <module>
import datalab.utils
File "/env/local/lib/python3.7/site-packages/datalab/utils/__init__.py",
line 15
from ._async import async, async_function, async_method
^
SyntaxError: invalid syntax
What could be causing this error?
You are passing a string to blob.upload_from_file(), but this method requires a file object. You probably want to use blob.upload_from_filename() instead. Check the sample in the GCP docs.
Alternatively, you could get the file object, and keep using blob.upload_from_file(), but it's unnecessary extra lines.
with open('/tmp/abc.json', 'r') as file_obj:
upload_blob('test-bucket', file_obj, 'abc.json')
Alternatively, pass a bucket object instead of a string —
something like upload_blob(conn.get_bucket(mybucket), '/tmp/abc.json', 'abc.json')

Skyfield year is out of range

I'm trying to use Skyfield to plot an orbit, but it doesn't work.
Here's the code:
import numpy as np
import matplotlib.pyplot as plt
from skyfield.api import Loader, Topos, EarthSatellite
text = """
GOCE
1 34602U 09013A 13314.96046236 .14220718 20669-5 50412-4 0 930
2 34602 096.5717 344.5256 0009826 296.2811 064.0942 16.58673376272979
"""
lines = text.strip().splitlines()
sat = EarthSatellite(lines[1], lines[2], lines[0])
print(sat.epoch.utc_jpl())
Here's the error I get:
File "orbit_preditor.py", line 21, in <module>
ISS = EarthSatellite(L1, L2)
File "C:\Python\Python36\lib\site-packages\skyfield\sgp4lib.py", line 86, in __init__
EarthSatellite.timescale = load.timescale()
File "C:\Python\Python36\lib\site-packages\skyfield\iokit.py", line 232, in timescale
preds = self('deltat.preds')
File "C:\Python\Python36\lib\site-packages\skyfield\iokit.py", line 142, in __call__
expiration_date, data = parser(f)
File "C:\Python\Python36\lib\site-packages\skyfield\iokit.py", line 309, in parse_deltat_preds
expiration_date = date(year[0] + 2, month[0], 1)
ValueError: year 58668 is out of range
Any ideas?
Try upgrading to the new version of Skyfield with pip install -U skyfield. A third party data file changed formats and so we made a new Skyfield release to fix it.

ImportError: cannot import name add_metaclass

ImportError: cannot import name add_metaclass
I'm getting this error. I'm using python27 and installed nltk and nltk_data, six but still the error is not solved. Please help.
Traceback (most recent call last):
File "C:/Users/swati/PycharmProjects/finalproject/one.py", line 1, in <module>
from nltk.stem import PorterStemmer
File "C:\Python27\lib\site-packages\nltk\__init__.py", line 128, in <module>
from nltk.chunk import *
File "C:\Python27\lib\site-packages\nltk\chunk\__init__.py", line 157, in <module>
from nltk.chunk.api import ChunkParserI
File "C:\Python27\lib\site-packages\nltk\chunk\api.py", line 13, in <module>
from nltk.parse import ParserI
File "C:\Python27\lib\site-packages\nltk\parse\__init__.py", line 77, in <module>
from nltk.parse.malt import MaltParser
File "C:\Python27\lib\site-packages\nltk\parse\malt.py", line 14, in <module>
from six import text_type
File "C:\Users\swati\PycharmProjects\finalproject\six.py", line 2, in <module>
from nltk.corpus import state_union
File "C:\Python27\lib\site-packages\nltk\corpus\__init__.py", line 64, in <module>
from nltk.tokenize import RegexpTokenizer
File "C:\Python27\lib\site-packages\nltk\tokenize\__init__.py", line 64, in <module>
from nltk.tokenize.mwe import MWETokenizer
File "C:\Python27\lib\site-packages\nltk\tokenize\mwe.py", line 33, in <module>
from nltk.tokenize.api import TokenizerI
File "C:\Python27\lib\site-packages\nltk\tokenize\api.py", line 14, in <module>
from six import add_metaclass
ImportError: cannot import name add_metaclass
Process finished with exit code 1

Cuda library dead after linux-updates

System ran beautifully, until I received update notifications from Ubuntu. So I accepted. And after they ran I get a big Cuda-issue:
('fp: ', <open file '/usr/local/lib/python2.7/dist-packages/tensorflow/python/_pywrap_tensorflow.so', mode 'rb' at 0x7f8af1a63300>)
('pathname: ', '/usr/local/lib/python2.7/dist-packages/tensorflow/python/_pywrap_tensorflow.so')
('description: ', ('.so', 'rb', 3))
Traceback (most recent call last):
File "translate.py", line 41, in <module>
import tensorflow.python.platform
File "/usr/local/lib/python2.7/dist-packages/tensorflow/__init__.py", line 23, in <module>
from tensorflow.python import *
File "/usr/local/lib/python2.7/dist-packages/tensorflow/python/__init__.py", line 45, in <module>
from tensorflow.python import pywrap_tensorflow
File "/usr/local/lib/python2.7/dist-packages/tensorflow/python/pywrap_tensorflow.py", line 31, in <module>
_pywrap_tensorflow = swig_import_helper()
File "/usr/local/lib/python2.7/dist-packages/tensorflow/python/pywrap_tensorflow.py", line 27, in swig_import_helper
_mod = imp.load_module('_pywrap_tensorflow', fp, pathname, description)
ImportError: libcudart.so.7.5: cannot open shared object file: No such file or directory
Any idea?
thx
It seems like your system cannot find "libcudart.so.7.5".
libcudart.so.7.5: cannot open shared object file: No such file or directory
Could you check this file exist and you set the PATH/LD_LIBRARY_PATH correctly?
export PATH=/usr/local/cuda/bin:$PATH
export LD_LIBRARY_PATH=/usr/local/cuda/lib64:$LD_LIBRARY_PATH