I'd use a Queue from Toro, which is designed to let coroutines cooperate like this. Here's a simple example:
from tornado.ioloop import IOLoop
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
from toro import Queue
# Bounded queue shared by producer and consumer; maxsize=1 makes the
# producer block (cooperatively, via yield) until the consumer takes
# each item, so the two coroutines proceed in lock-step.
q = Queue(maxsize=1)
@gen.coroutine
def consumer():
    """Pull (url, length) items off the queue and print them.

    Exits when the producer's ``None`` sentinel arrives.
    """
    item = yield q.get()
    # Explicit sentinel check: only None terminates the loop, so a falsy
    # (but valid) item could never end consumption by accident.
    while item is not None:
        # Parenthesized print of a single value behaves the same under
        # Python 2 (statement) and Python 3 (function).
        print(item)
        item = yield q.get()
@gen.coroutine
def producer():
    """Fetch several URLs, enqueue (url, body_length) pairs, then a sentinel.

    On any error, stops the IOLoop (so run_sync() does not hang) and
    re-raises the exception.
    """
    try:
        client = AsyncHTTPClient()
        for url in [
            'http://tornadoweb.org',
            'http://python.org',
            'http://readthedocs.org']:
            response = yield client.fetch(url)
            item = (url, len(response.body))
            # Blocks cooperatively until the consumer makes room (maxsize=1).
            yield q.put(item)
        # Done. The sentinel put must also be yielded: the queue is bounded,
        # so an un-yielded put() on a full queue may not complete before this
        # coroutine returns, and the consumer would never see the sentinel.
        yield q.put(None)
    except Exception:
        # Stop the loop first so run_sync(consumer) unblocks, then surface
        # the error to this coroutine's future.
        IOLoop.current().stop()
        raise
# Kick off the producer. Its future is not awaited here: on failure the
# producer stops the IOLoop itself and re-raises, so nothing else needs
# to inspect the result.
producer_future = producer()
# Drive the event loop until consumer() finishes (or 20 seconds elapse).
IOLoop.current().run_sync(consumer, timeout=20)
A more detailed web crawler example is in Toro's docs, here:
https://toro.readthedocs.org/en/stable/examples/web_spider_example.html