import aiohttp
import asyncio
async def fetch_data(url, session=None):
    """Fetch *url* over HTTP and return its JSON-decoded response body.

    Args:
        url: The URL to GET.
        session: Optional shared ``aiohttp.ClientSession``. If omitted, a
            temporary session is created just for this one request —
            convenient for a single call, but wasteful when fetching many
            URLs, so callers doing bulk work should pass a shared session.

    Returns:
        The parsed JSON payload (typically a dict or list).
    """
    if session is None:
        # One-off session, closed automatically when the request completes.
        async with aiohttp.ClientSession() as owned_session:
            async with owned_session.get(url) as response:
                return await response.json()
    async with session.get(url) as response:
        return await response.json()
async def fetch_multiple_urls(urls):
    """Fetch every URL in *urls* concurrently.

    All requests are started at once; ``asyncio.gather`` waits for them all
    and preserves input order, so ``results[i]`` corresponds to ``urls[i]``.

    Args:
        urls: Iterable of URL strings to GET.

    Returns:
        List of JSON payloads, in the same order as *urls*.
    """
    tasks = [fetch_data(url) for url in urls]
    return await asyncio.gather(*tasks)
async def main():
    """Demo entry point: fetch three example endpoints concurrently and print each result."""
    api_urls = [
        'https://api.example.com/data/1',
        'https://api.example.com/data/2',
        'https://api.example.com/data/3'
    ]
    results = await fetch_multiple_urls(api_urls)
    # enumerate from 1 so the printed API number matches the URL suffix.
    for i, result in enumerate(results, 1):
        print(f"Data from API {i}: {result}")
if __name__ == "__main__":
asyncio.run(main())
This code demonstrates how to make multiple HTTP requests concurrently using Python’s asyncio and aiohttp libraries. It solves the common problem of efficiently fetching data from multiple API endpoints without waiting for each request to complete sequentially.
pip install aiohttp
python script_name.py
Note: `asyncio.run()` requires Python 3.7 or newer. If you are running Python 3.6 or earlier, replace:

asyncio.run(main())

with:

loop = asyncio.get_event_loop()
loop.run_until_complete(main())