SSLCertVerificationError in a Jupyter notebook with the pygooglenews pip package


I'm fairly new to this, and I'm not sure how to solve an SSLCertVerificationError I ran into while following a tutorial on YouTube.
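
For reference, my setup is essentially this (you can see the lang and country values echoed in the failing request URL further down):

from pygooglenews import GoogleNews

gn = GoogleNews(lang='en', country='MALAYSIA')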

I'm trying to scrape Google News for certain keywords, but when I run gn.search('keyword'), this is the error I get:


SSLCertVerificationError                  Traceback (most recent call last)
C:\Program Files\Anaconda3\lib\site-packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)
    669             # Make the request on the httplib connection object.
--> 670             httplib_response = self._make_request(
    671                 conn,

C:\Program Files\Anaconda3\lib\site-packages\urllib3\connectionpool.py in _make_request(self, conn, method, url, timeout, chunked, **httplib_request_kw)
    380         try:
--> 381             self._validate_conn(conn)
    382         except (SocketTimeout, BaseSSLError) as e:

C:\Program Files\Anaconda3\lib\site-packages\urllib3\connectionpool.py in _validate_conn(self, conn)
    977         if not getattr(conn, "sock", None):  # AppEngine might not have  `.sock`
--> 978             conn.connect()
    979

C:\Program Files\Anaconda3\lib\site-packages\urllib3\connection.py in connect(self)
    361
--> 362         self.sock = ssl_wrap_socket(
    363             sock=conn,

C:\Program Files\Anaconda3\lib\site-packages\urllib3\util\ssl_.py in ssl_wrap_socket(sock, keyfile, certfile, cert_reqs, ca_certs, server_hostname, ssl_version, ciphers, ssl_context, ca_cert_dir, key_password, ca_cert_data)
    385         if HAS_SNI and server_hostname is not None:
--> 386             return context.wrap_socket(sock, server_hostname=server_hostname)
    387

C:\Program Files\Anaconda3\lib\ssl.py in wrap_socket(self, sock, server_side, do_handshake_on_connect, suppress_ragged_eofs, server_hostname, session)
    499         # ctx._wrap_socket()
--> 500         return self.sslsocket_class._create(
    501             sock=sock,

C:\Program Files\Anaconda3\lib\ssl.py in _create(cls, sock, server_side, do_handshake_on_connect, suppress_ragged_eofs, server_hostname, context, session)
   1039                         raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets")
-> 1040                     self.do_handshake()
   1041             except (OSError, ValueError):

C:\Program Files\Anaconda3\lib\ssl.py in do_handshake(self, block)
   1308                 self.settimeout(None)
-> 1309             self._sslobj.do_handshake()
   1310             finally:

SSLCertVerificationError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:1123)

During handling of the above exception, another exception occurred:

MaxRetryError                             Traceback (most recent call last)
C:\Program Files\Anaconda3\lib\site-packages\requests\adapters.py in send(self, request, stream, timeout, verify, cert, proxies)
    438             if not chunked:
--> 439                 resp = conn.urlopen(
    440                     method=request.method,

C:\Program Files\Anaconda3\lib\site-packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, chunked, body_pos, **response_kw)
    725
--> 726             retries = retries.increment(
    727                 method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]

C:\Program Files\Anaconda3\lib\site-packages\urllib3\util\retry.py in increment(self, method, url, response, error, _pool, _stacktrace)
    445         if new_retry.is_exhausted():
--> 446             raise MaxRetryError(_pool, url, error or ResponseError(cause))
    447

MaxRetryError: HTTPSConnectionPool(host='news.google.com', port=443): Max retries exceeded with url: /rss/search?q=ChatGPT&ceid=MALAYSIA:en&hl=en&gl=MALAYSIA (Caused by SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:1123)')))

During handling of the above exception, another exception occurred:

SSLError                                  Traceback (most recent call last)
<ipython-input-35-ee2fc9be4b9d> in <module>
----> 1 gn.search('ChatGPT')

C:\Program Files\Anaconda3\lib\site-packages\pygooglenews\__init__.py in search(self, query, helper, when, from_, to_, proxies, scraping_bee)
    152         search_ceid = search_ceid.replace('?', '&')
    153
--> 154         d = self.__parse_feed(self.BASE_URL + '/search?q={}'.format(query) + search_ceid, proxies = proxies, scraping_bee=scraping_bee)
    155
    156         d['entries'] = self.__add_sub_articles(d['entries'])

C:\Program Files\Anaconda3\lib\site-packages\pygooglenews\__init__.py in __parse_feed(self, feed_url, proxies, scraping_bee)
     65             r = requests.get(feed_url, proxies = proxies)
     66         else:
---> 67             r = requests.get(feed_url)
     68
     69         if scraping_bee:

C:\Program Files\Anaconda3\lib\site-packages\requests\api.py in get(url, params, **kwargs)
     74
     75     kwargs.setdefault('allow_redirects', True)
---> 76     return request('get', url, params=params, **kwargs)
     77
     78

C:\Program Files\Anaconda3\lib\site-packages\requests\api.py in request(method, url, **kwargs)
     59     # cases, and look like a memory leak in others.
     60     with sessions.Session() as session:
---> 61         return session.request(method=method, url=url, **kwargs)
     62
     63

C:\Program Files\Anaconda3\lib\site-packages\requests\sessions.py in request(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)
    528         }
    529         send_kwargs.update(settings)
--> 530         resp = self.send(prep, **send_kwargs)
    531
    532         return resp

C:\Program Files\Anaconda3\lib\site-packages\requests\sessions.py in send(self, request, **kwargs)
    641
    642         # Send the request
--> 643         r = adapter.send(request, **kwargs)
    644
    645         # Total elapsed time of the request (approximately)

C:\Program Files\Anaconda3\lib\site-packages\requests\adapters.py in send(self, request, stream, timeout, verify, cert, proxies)
    512             if isinstance(e.reason, _SSLError):
    513                 # This branch is for urllib3 v1.22 and later.
--> 514                 raise SSLError(e, request=request)
    515
    516             raise ConnectionError(e, request=request)

SSLError: HTTPSConnectionPool(host='news.google.com', port=443): Max retries exceeded with url: /rss/search?q=ChatGPT&ceid=MALAYSIA:en&hl=en&gl=MALAYSIA (Caused by SSLError(SSLCertVerificationError(1, '[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: unable to get local issuer certificate (_ssl.c:1123)')))

Can someone help me figure out how to fix this? :(


1 Answer

Answered by Adep Sainaveen:

Use verify=False in the requests.get() call to get past the SSLCertVerificationError. Be aware that this disables certificate verification entirely, so only use it if you accept that risk.

import requests

# verify=False skips SSL certificate verification for this request.
# urllib3 will emit an InsecureRequestWarning to flag the risk.
flipkart_url = "https://www.flipkart.com/search?q=" + "iphone15"
response_website = requests.get(flipkart_url, verify=False)

print(response_website.text)

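Note that verify=False only silences the error; it doesn't fix the missing CA certificates, and it leaves the connection open to man-in-the-middle attacks. Also, pygooglenews calls requests.get() internally and doesn't expose a verify argument, so you can't pass it through gn.search(). A safer option (a sketch, assuming the certifi package is installed, which Anaconda ships by default) is to point requests at certifi's CA bundle through the REQUESTS_CA_BUNDLE environment variable, which requests honors:

import os
import certifi

# Point requests (and libraries built on it, such as pygooglenews)
# at certifi's trusted CA bundle before any request is made.
os.environ['REQUESTS_CA_BUNDLE'] = certifi.where()

from pygooglenews import GoogleNews

gn = GoogleNews(lang='en', country='MY')  # two-letter country code for Malaysia
result = gn.search('ChatGPT')
print(result['feed'].title)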