To check whether the issue is caused by multithreading, you could try using multiple processes instead:
#!/usr/bin/env python
####from multiprocessing.dummy import Pool # use threads
from multiprocessing import Pool # use processes
from multiprocessing import freeze_support
import feedparser
def fetch_rss(url):
    """Download and parse the RSS feed at *url*.

    Returns a ``(url, entries, error)`` tuple: ``entries`` is the list of
    feed items (or ``None`` on a hard failure) and ``error`` is a string
    describing the problem, or ``None`` when the feed parsed cleanly.
    """
    try:
        parsed = feedparser.parse(url)
    except Exception as exc:
        # Hard failure: nothing usable came back.
        return url, None, str(exc)
    # feedparser reports soft errors via the 'bozo_exception' entry
    # rather than raising; surface it as a string if present.
    problem = parsed.get('bozo_exception')
    return url, parsed['entries'], str(problem) if problem else None
if __name__ == "__main__":
    # Required on Windows for frozen executables using multiprocessing.
    freeze_support()
    with open('urls.txt') as file:
        # Materialize the list (not a lazy generator) so the URLs are all
        # read before the file is closed — a generator consumed after the
        # `with` block exits would raise "I/O operation on closed file".
        urls = [line.strip() for line in file if line.strip()]
    # Use the pool as a context manager so worker processes are always
    # terminated and reaped; the original never closed/joined the pool.
    with Pool(20) as pool:  # no more than 20 concurrent downloads
        for url, items, error in pool.imap_unordered(fetch_rss, urls):
            if error is None:
                print(url, len(items))
            else:
                print(url, error)