Nikolay Novik
But what do you do when the available libraries work synchronously, potentially blocking the event loop?
You do not block the event loop.
You never block the event loop.
import asyncio
import time

loop = asyncio.get_event_loop()
loop.set_debug(True)  # debug mode is required for slow-callback reporting
loop.slow_callback_duration = 0.01

async def sleeper():
    time.sleep(0.1)  # we block here

loop.run_until_complete(sleeper())
Executing <Task finished coro=<sleeper() done, defined at code/debug_example.py:9> result=None created at /usr/local/lib/python3.5/asyncio/base_events.py:323> took 0.102 seconds
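One way to fix the warning above is to push the blocking call into the default thread pool with run_in_executor. A minimal sketch (for a pure delay, await asyncio.sleep(0.1) would be simpler still):

import asyncio
import time

loop = asyncio.get_event_loop()

async def sleeper():
    # the blocking call runs in a worker thread, the event loop keeps running
    await loop.run_in_executor(None, time.sleep, 0.1)

loop.run_until_complete(sleeper())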
Search for an asyncio-compatible library on:
google: ~98k results
pypi: ~200 packages
asyncio wiki page: https://github.com/python/asyncio/wiki/ThirdParty
aio-libs: https://github.com/aio-libs

@asyncio.coroutine
def get_async(self):
    """Sends the GET request using an asyncio coroutine .... """
    future = self._client._loop.run_in_executor(None, self.get)
    collection_response = yield from future
    return collection_response

Most of the time you want to do HTTP requests using the event loop, not a thread pool.
import asyncio
import aiohttp

# carry the loop, Luke!
loop = asyncio.get_event_loop()

async def go():
    session = aiohttp.ClientSession(loop=loop)
    async with session.get('http://python.org') as resp:
        data = await resp.text()
        print(data)
    session.close()

loop.run_until_complete(go())

Connection pooling helps to save on expensive connection creation. (PS: check out the new aiohttp 0.18.x release)
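To actually benefit from connection pooling, reuse one ClientSession for many requests. A minimal sketch matching the talk-era API, where session.close() is synchronous (the URL list is an assumption):

import asyncio
import aiohttp

loop = asyncio.get_event_loop()

async def fetch_all(urls):
    # one session == one connection pool; reuse it for every request
    session = aiohttp.ClientSession(loop=loop)
    try:
        pages = []
        for url in urls:
            async with session.get(url) as resp:
                pages.append(await resp.text())
        return pages
    finally:
        session.close()

loop.run_until_complete(fetch_all(['http://python.org', 'http://python.org/dev']))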
Examples of databases and message queues with binary protocols:
Do not be afraid to get your hands dirty.
import asyncio
import struct

from aiogibson import encode_command

async def read_from_connection(host, port, *, loop=None):
    reader, writer = await asyncio.open_connection(host, port, loop=loop)
    cmd = encode_command(b'GET', 'key')
    writer.write(cmd)
    # header: reply code (2 bytes) + encoding (1 byte) + payload size (4 bytes)
    header = await reader.readexactly(4 + 2 + 1)
    unpacked = struct.unpack(b'<HBI', header)
    code, gb_encoding, resp_size = unpacked
    payload = await reader.readexactly(resp_size)
    print(payload)

Simple, but no protocol pipelining.
def execute(self):
    cmd = encode_command(b'GET', 'key')
    self.writer.write(cmd)
    fut = asyncio.Future(loop=self._loop)
    self._queue.append(fut)
    return fut

async def reader_task(self):
    while True:
        header = await self.reader.readexactly(4 + 2 + 1)
        unpacked = struct.unpack(b'<HBI', header)
        code, gb_encoding, resp_size = unpacked
        # wait and read payload
        payload = await self.reader.readexactly(resp_size)
        # responses arrive in request order: complete futures FIFO
        # (assuming self._queue is a collections.deque)
        future = self._queue.popleft()
        future.set_result(payload)

See aioredis for a reference implementation.
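For comparison, a rough usage sketch against the old aioredis 1.x-style API (host, port and key names are assumptions):

import asyncio
import aioredis

loop = asyncio.get_event_loop()

async def go():
    # a single connection; commands are pipelined over it
    redis = await aioredis.create_redis(('localhost', 6379), loop=loop)
    await redis.set('my-key', 'value')
    val = await redis.get('my-key')
    print(val)
    redis.close()

loop.run_until_complete(go())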
import asyncio
from concurrent.futures import ThreadPoolExecutor

from pyodbc import connect

loop = asyncio.get_event_loop()
executor = ThreadPoolExecutor(max_workers=4)

async def test_example():
    dsn = 'Driver=SQLite;Database=sqlite.db'
    conn = await loop.run_in_executor(executor, connect, dsn)
    cursor = await loop.run_in_executor(executor, conn.cursor)
    result = await loop.run_in_executor(executor, cursor.execute, 'SELECT 42;')

loop.run_until_complete(test_example())
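The same pattern can be tucked behind a small helper so keyword arguments survive the trip through the executor; delegate and fetch_answer below are hypothetical names, reusing loop and executor from the example above:

import functools

def delegate(loop, executor, func, *args, **kwargs):
    # wrap the blocking callable and run it in the thread pool; returns an awaitable
    return loop.run_in_executor(executor, functools.partial(func, *args, **kwargs))

async def fetch_answer(conn):
    cursor = await delegate(loop, executor, conn.cursor)
    await delegate(loop, executor, cursor.execute, 'SELECT 42;')
    return await delegate(loop, executor, cursor.fetchone)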
Offloading to a thread pool only helps if the blocking call releases the GIL, as the socket operations underneath requests.get() do.

For Cython:
with nogil:
    [code to be executed with the GIL released]
For a C extension:
Py_BEGIN_ALLOW_THREADS
ret = SQLDriverConnect(hdbc, 0, szConnect, SQL_NTS,
                       0, 0, 0, SQL_DRIVER_NOPROMPT);
Py_END_ALLOW_THREADS
asyncio does not support asynchronous operations on the filesystem due to OS limitations.
The only good way to use files asynchronously is through thread pools. The aiofiles library provides a workaround:
import asyncio
import aiofiles

loop = asyncio.get_event_loop()

async def go():
    f = await aiofiles.open('filename', mode='r')
    try:
        data = await f.read()
    finally:
        await f.close()
    print(data)

loop.run_until_complete(go())

Under the hood aiofiles uses a ThreadPoolExecutor for blocking calls.
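Roughly what that amounts to without the library: a sketch that pushes ordinary blocking file I/O into the default thread pool via run_in_executor:

import asyncio

loop = asyncio.get_event_loop()

def read_file(path):
    # ordinary blocking I/O, executed inside a worker thread
    with open(path, mode='r') as f:
        return f.read()

async def go():
    data = await loop.run_in_executor(None, read_file, 'filename')
    print(data)

loop.run_until_complete(go())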
import asyncio
import math
from concurrent.futures import ProcessPoolExecutor

loop = asyncio.get_event_loop()
executor = ProcessPoolExecutor(max_workers=3)

def is_prime(n):
    if n % 2 == 0:
        return False
    sqrt_n = int(math.floor(math.sqrt(n)))
    for i in range(3, sqrt_n + 1, 2):
        if n % i == 0:
            return False
    return True

async def go():
    n = 112272535095293
    result = await loop.run_in_executor(executor, is_prime, n)
    print(result)

loop.run_until_complete(go())
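Several candidates can also be checked in parallel worker processes while the loop stays responsive; a sketch reusing loop, executor and is_prime from above (check_many and the sample numbers are assumptions):

async def check_many(candidates):
    # fan the CPU-bound checks out to the process pool and gather the results
    futs = [loop.run_in_executor(executor, is_prime, n) for n in candidates]
    return await asyncio.gather(*futs)

numbers = [112272535095293, 115280095190773]
print(loop.run_until_complete(check_many(numbers)))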