Handle exceptions in _crawl function by logging errors and returning None to prevent crashes
source/eastmoney.py (+2 -1)
```diff
@@ -53,7 +53,8 @@ def _crawl(url, article, retries=3):
                 time.sleep(1)  # Wait before retrying
                 continue
             else:
-
+                print(e)
+                return None
     html_text = text.decode("utf-8")
     page = etree.HTML(html_text)
     contentcn, summary = encode_content(
```
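For context, here is a minimal, self-contained sketch of how the retry loop plausibly fits together after this change. Only the lines shown in the hunk are confirmed: the fetch via `urlopen`, the loop structure, the exact indentation, and the `encode_content` stub are assumptions inferred from the surrounding context, and the content of the removed line at old line 56 is not recoverable from this page.

```python
import time
from urllib.request import urlopen

from lxml import etree


def encode_content(page):
    # Hypothetical stub standing in for the module's real encode_content
    # helper, which is defined elsewhere in source/eastmoney.py.
    body = " ".join(page.xpath("//p//text()"))
    return body, body[:120]


def _crawl(url, article, retries=3):
    # `article` is unused in this sketch; the real function presumably
    # stores the crawled content on it.
    text = None
    for attempt in range(retries):
        try:
            text = urlopen(url).read()
            break  # Fetched successfully; stop retrying
        except Exception as e:
            if attempt < retries - 1:
                time.sleep(1)  # Wait before retrying
                continue
            else:
                print(e)      # Log the final failure...
                return None   # ...and bail out instead of crashing
    html_text = text.decode("utf-8")
    page = etree.HTML(html_text)
    contentcn, summary = encode_content(page)
    return contentcn, summary
```

Because `_crawl` can now return None after exhausting its retries, any caller that unpacks its result should first check for None (e.g. `result = _crawl(url, article)` followed by `if result is None: continue`) rather than assuming a successful fetch.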