ImportError: cannot import name requests while installing Tensorflow - python-2.7

I have installed Anaconda2 on my Windows 10 machine with Python 2.7.15. Now I need to install TensorFlow and Keras; however, I consistently get this error when installing using "conda" or "pip". I have googled this error a lot, but to no avail. Please help me find a lead to resolving this issue. TIA, Priya Arora
The stack trace is as below:
(base) C:\Users\ARORAP1\anaconda2>conda install jupyter
Traceback (most recent call last):
File "C:\Users\ARORAP1\anaconda2\Scripts\conda-script.py", line 10, in <module
>
sys.exit(main())
File "C:\Users\ARORAP1\anaconda2\lib\site-packages\conda\cli\main.py", line 11
3, in main
return conda_exception_handler(_main, *args)
File "C:\Users\ARORAP1\anaconda2\lib\site-packages\conda\exceptions.py", line
1112, in conda_exception_handler
return_value = exception_handler(func, *args, **kwargs)
File "C:\Users\ARORAP1\anaconda2\lib\site-packages\conda\exceptions.py", line
822, in __call__
return self.handle_exception(exc_val, exc_tb)
File "C:\Users\ARORAP1\anaconda2\lib\site-packages\conda\exceptions.py", line
864, in handle_exception
return self.handle_unexpected_exception(exc_val, exc_tb)
File "C:\Users\ARORAP1\anaconda2\lib\site-packages\conda\exceptions.py", line
876, in handle_unexpected_exception
self.print_unexpected_error_report(error_report)
File "C:\Users\ARORAP1\anaconda2\lib\site-packages\conda\exceptions.py", line
947, in print_unexpected_error_report
from .cli.main_info import get_env_vars_str, get_main_info_str
File "C:\Users\ARORAP1\anaconda2\lib\site-packages\conda\cli\main_info.py", li
ne 25, in <module>
from ..core.subdir_data import SubdirData
File "C:\Users\ARORAP1\anaconda2\lib\site-packages\conda\core\subdir_data.py",
line 28, in <module>
from ..core.package_cache_data import PackageCacheData
File "C:\Users\ARORAP1\anaconda2\lib\site-packages\conda\core\package_cache_da
ta.py", line 13, in <module>
from .path_actions import CacheUrlAction, ExtractPackageAction
File "C:\Users\ARORAP1\anaconda2\lib\site-packages\conda\core\path_actions.py"
, line 25, in <module>
from ..gateways.connection.download import download
File "C:\Users\ARORAP1\anaconda2\lib\site-packages\conda\gateways\connection\_
_init__.py", line 16, in <module>
from pip._vendor.requests import ConnectionError, HTTPError, Session
File "C:\Users\ARORAP1\anaconda2\lib\site-packages\pip\_vendor\requests\__init
__.py", line 83, in <module>
from pip._internal.compat import WINDOWS
File "C:\Users\ARORAP1\anaconda2\lib\site-packages\pip\_internal\__init__.py",
line 42, in <module>
from pip._internal import cmdoptions
File "C:\Users\ARORAP1\anaconda2\lib\site-packages\pip\_internal\cmdoptions.py
", line 16, in <module>
from pip._internal.index import (
File "C:\Users\ARORAP1\anaconda2\lib\site-packages\pip\_internal\index.py", li
ne 15, in <module>
from pip._vendor import html5lib, requests, six
ImportError: cannot import name requests
(base) C:\Users\ARORAP1\anaconda2>

Related

AttributeError: 'EntryPoints' object has no attribute 'get' with superset

I am installing Superset, and while running superset db upgrade I got an error saying: AttributeError: 'EntryPoints' object has no attribute 'get'
Error:
python datavisual\Scripts\superset db upgrade
Traceback (most recent call last):
File "datavisual\Scripts\superset", line 18, in <module>
from superset.cli import superset
File "C:\Users\user\Desktop\superset\datavisual\lib\site-packages\superset\__init__.py", line 21, in <module>
from superset.app import create_app
File "C:\Users\user\Desktop\superset\datavisual\lib\site-packages\superset\app.py", line 29, in <module>
from superset.extensions import (
File "C:\Users\user\Desktop\superset\datavisual\lib\site-packages\superset\extensions.py", line 126, in <module>
celery_app = celery.Celery()
File "C:\Users\user\Desktop\superset\datavisual\lib\site-packages\celery\local.py", line 470, in __getattr__
[name])
File "C:\Users\user\Desktop\superset\datavisual\lib\site-packages\celery\app\__init__.py", line 2, in <module>
from celery import _state
File "C:\Users\user\Desktop\superset\datavisual\lib\site-packages\celery\_state.py", line 15, in <module>
from celery.utils.threads import LocalStack
File "C:\Users\user\Desktop\superset\datavisual\lib\site-packages\celery\utils\__init__.py", line 16, in <module>
from .nodenames import nodename, nodesplit, worker_direct
File "C:\Users\user\Desktop\superset\datavisual\lib\site-packages\celery\utils\nodenames.py", line 6, in <module>
from kombu.entity import Exchange, Queue
File "C:\Users\user\Desktop\superset\datavisual\lib\site-packages\kombu\entity.py", line 7, in <module>
from .serialization import prepare_accept_content
File "C:\Users\user\Desktop\superset\datavisual\lib\site-packages\kombu\serialization.py", line 440, in <module>
for ep, args in entrypoints('kombu.serializers'): # pragma: no cover
File "C:\Users\user\Desktop\superset\datavisual\lib\site-packages\kombu\utils\compat.py", line 82, in entrypoints
for ep in importlib_metadata.entry_points().get(namespace, [])
AttributeError: 'EntryPoints' object has no attribute 'get'

How to solve error while launching scrapy shell?

When I executed scrapy shell 'https://scrapy.org', the following error occurred. I followed the instructions from the Scrapy documentation: https://doc.scrapy.org/en/latest/topics/shell.html#launch-the-shell
Traceback (most recent call last):
File "/usr/local/bin/scrapy", line 11, in <module>
sys.exit(execute())
File "/usr/local/lib/python2.7/dist-packages/scrapy/cmdline.py", line 150, in execute
_run_print_help(parser, _run_command, cmd, args, opts)
File "/usr/local/lib/python2.7/dist-packages/scrapy/cmdline.py", line 90, in _run_print_help
func(*a, **kw)
File "/usr/local/lib/python2.7/dist-packages/scrapy/cmdline.py", line 157, in _run_command
cmd.run(args, opts)
File "/usr/local/lib/python2.7/dist-packages/scrapy/commands/shell.py", line 65, in run
crawler = self.crawler_process._create_crawler(spidercls)
File "/usr/local/lib/python2.7/dist-packages/scrapy/crawler.py", line 203, in _create_crawler
return Crawler(spidercls, self.settings)
File "/usr/local/lib/python2.7/dist-packages/scrapy/crawler.py", line 55, in __init__
self.extensions = ExtensionManager.from_crawler(self)
File "/usr/local/lib/python2.7/dist-packages/scrapy/middleware.py", line 58, in from_crawler
return cls.from_settings(crawler.settings, crawler)
File "/usr/local/lib/python2.7/dist-packages/scrapy/middleware.py", line 34, in from_settings
mwcls = load_object(clspath)
File "/usr/local/lib/python2.7/dist-packages/scrapy/utils/misc.py", line 44, in load_object
mod = import_module(module)
File "/usr/lib/python2.7/importlib/__init__.py", line 37, in import_module
__import__(name)
File "/usr/local/lib/python2.7/dist-packages/scrapy/extensions/memusage.py", line 16, in <module>
from scrapy.mail import MailSender
File "/usr/local/lib/python2.7/dist-packages/scrapy/mail.py", line 25, in <module>
from twisted.internet import defer, reactor, ssl
File "/usr/local/lib/python2.7/dist-packages/twisted/internet/ssl.py", line 230, in <module>
from twisted.internet._sslverify import (
File "/usr/local/lib/python2.7/dist-packages/twisted/internet/_sslverify.py", line 15, in <module>
from OpenSSL._util import lib as pyOpenSSLlib
ImportError: No module named _util
It's a problem with the Scrapy installation. Maybe this can help:
sudo pip install pyopenssl --user --upgrade

scrapy startproject tutorial error Mac OS

I am new to Python and coding in general. I am trying to build a scraper through Scrapy. I am trying to do this on Mac OS 10.12.6. I've followed the instructions as closely as possible (https://doc.scrapy.org/en/latest/intro/tutorial.html) and yet when I call
scrapy startproject tutorial, I get the output below. What's the best way to solve this, and to understand future error messages like this?
Thanks!
File "/usr/local/bin/scrapy", line 9, in <module>
load_entry_point('Scrapy==1.4.0', 'console_scripts', 'scrapy')()
File "/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/pkg_resources/__init__.py", line 565, in load_entry_point
return get_distribution(dist).load_entry_point(group, name)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/pkg_resources/__init__.py", line 2697, in load_entry_point
return ep.load()
File "/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/pkg_resources/__init__.py", line 2370, in load
return self.resolve()
File "/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/pkg_resources/__init__.py", line 2376, in resolve
module = __import__(self.module_name, fromlist=['__name__'], level=0)
File "/Library/Python/2.7/site-packages/Scrapy-1.4.0-py2.7.egg/scrapy/cmdline.py", line 9, in <module>
from scrapy.crawler import CrawlerProcess
File "/Library/Python/2.7/site-packages/Scrapy-1.4.0-py2.7.egg/scrapy/crawler.py", line 7, in <module>
from twisted.internet import reactor, defer
File "/Library/Python/2.7/site-packages/Twisted-17.9.0-py2.7-macosx-10.12-intel.egg/twisted/internet/reactor.py", line 38, in <module>
from twisted.internet import default
File "/Library/Python/2.7/site-packages/Twisted-17.9.0-py2.7-macosx-10.12-intel.egg/twisted/internet/default.py", line 56, in <module>
install = _getInstallFunction(platform)
File "/Library/Python/2.7/site-packages/Twisted-17.9.0-py2.7-macosx-10.12-intel.egg/twisted/internet/default.py", line 50, in _getInstallFunction
from twisted.internet.selectreactor import install
File "/Library/Python/2.7/site-packages/Twisted-17.9.0-py2.7-macosx-10.12-intel.egg/twisted/internet/selectreactor.py", line 18, in <module>
from twisted.internet import posixbase
File "/Library/Python/2.7/site-packages/Twisted-17.9.0-py2.7-macosx-10.12-intel.egg/twisted/internet/posixbase.py", line 18, in <module>
from twisted.internet import error, udp, tcp
File "/Library/Python/2.7/site-packages/Twisted-17.9.0-py2.7-macosx-10.12-intel.egg/twisted/internet/tcp.py", line 28, in <module>
from twisted.internet._newtls import (
File "/Library/Python/2.7/site-packages/Twisted-17.9.0-py2.7-macosx-10.12-intel.egg/twisted/internet/_newtls.py", line 21, in <module>
from twisted.protocols.tls import TLSMemoryBIOFactory, TLSMemoryBIOProtocol
File "/Library/Python/2.7/site-packages/Twisted-17.9.0-py2.7-macosx-10.12-intel.egg/twisted/protocols/tls.py", line 63, in <module>
from twisted.internet._sslverify import _setAcceptableProtocols
File "/Library/Python/2.7/site-packages/Twisted-17.9.0-py2.7-macosx-10.12-intel.egg/twisted/internet/_sslverify.py", line 38, in <module>
TLSVersion.TLSv1_1: SSL.OP_NO_TLSv1_1,
AttributeError: 'module' object has no attribute 'OP_NO_TLSv1_1'
I fixed this by updating to pyOpenSSL 0.14.
Shortly after, though, I had more problems with the local and user versions of Python, but sorted this out by downloading Anaconda.

tensorflow import error in python 2.7.6

I have installed TensorFlow from the TensorFlow website. While trying to import TensorFlow, it shows the following error:
>>>import tensorflow as tf
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python2.7/dist-packages/tensorflow/__init__.py", line 24, in <module>
from tensorflow.python import *
File "/usr/local/lib/python2.7/dist-packages/tensorflow/python/__init__.py", line 75, in <module>
from tensorflow.core.framework.graph_pb2 import *
File "/usr/local/lib/python2.7/dist-packages/tensorflow/core/framework/graph_pb2.py", line 6, in <module>
from google.protobuf import descriptor as _descriptor
File "/usr/local/lib/python2.7/dist-packages/google/protobuf/__init__.py", line 37, in <module>
__import__('pkg_resources').declare_namespace(__name__)
File "/usr/local/lib/python2.7/dist-packages/pkg_resources/__init__.py", line 36, in <module>
import email.parser
File "/usr/lib/python2.7/email/parser.py", line 12, in <module>
from email.feedparser import FeedParser
File "/usr/lib/python2.7/email/feedparser.py", line 27, in <module>
from email import message
File "/usr/lib/python2.7/email/message.py", line 16, in <module>
import email.charset
File "/usr/lib/python2.7/email/charset.py", line 13, in <module>
import email.base64mime
File "/usr/lib/python2.7/email/base64mime.py", line 40, in <module>
from email.utils import fix_eols
File "/usr/lib/python2.7/email/utils.py", line 28, in <module>
import socket
File "/home/tamarind/socket.py", line 5, in <module>
This module provides socket operations and some related functions.
TypeError: 'module' object is not callable
I think it's a problem with socket. Please suggest a fix.
It looks like /home/tamarind/socket.py is taking precedence over the python socket module. Just rename/remove that (or import TensorFlow from a different directory) and it should work. You may need to remove socket.pyc too.

mac os import pymongo cause error

How can I fix this problem? Thanks.
>>> import pymongo
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Library/Python/2.7/site-packages/pymongo-2.5.2-py2.7-macosx-10.7-intel.egg/pymongo/__init__.py", line 80, in <module>
File "/Library/Python/2.7/site-packages/pymongo-2.5.2-py2.7-macosx-10.7-intel.egg/pymongo/connection.py", line 39, in <module>
File "/Library/Python/2.7/site-packages/pymongo-2.5.2-py2.7-macosx-10.7-intel.egg/pymongo/mongo_client.py", line 44, in <module>
File "/Library/Python/2.7/site-packages/pymongo-2.5.2-py2.7-macosx-10.7-intel.egg/bson/__init__.py", line 41, in <module>
File "/Library/Python/2.7/site-packages/pymongo-2.5.2-py2.7-macosx-10.7-intel.egg/bson/_cbson.py", line 7, in <module>
File "/Library/Python/2.7/site-packages/pymongo-2.5.2-py2.7-macosx-10.7-intel.egg/bson/_cbson.py", line 4, in __bootstrap__
File "build/bdist.macosx-10.7-intel/egg/pkg_resources.py", line 914, in resource_filename
%s
File "build/bdist.macosx-10.7-intel/egg/pkg_resources.py", line 1601, in get_resource_filename
"""Retrieve a PEP 302 "importer" for the given path item
File "build/bdist.macosx-10.7-intel/egg/pkg_resources.py", line 1629, in _extract_resource
from pkgutil import get_importer, ImpImporter
File "build/bdist.macosx-10.7-intel/egg/pkg_resources.py", line 990, in get_cache_path
AttributeError: ResourceManager instance has no attribute '_warn_unsafe_extraction'