This snippet demonstrates how to efficiently check the status of multiple URLs concurrently using Python’s asyncio and aiohttp. This is useful for monitoring website availability, performing bulk health checks, or validating links in a scalable way.
import asyncio
from typing import Any, Dict, List

import aiohttp


async def check_url(session: aiohttp.ClientSession, url: str) -> Dict[str, Any]:
    """Fetch a single URL and report its HTTP status or the error encountered."""
    try:
        async with session.get(url, timeout=aiohttp.ClientTimeout(total=5)) as response:
            return {"url": url, "status": response.status, "error": None}
    except Exception as e:
        # Catch any failure (DNS error, timeout, connection refused, ...) so one
        # bad URL never aborts the whole batch.
        return {"url": url, "status": None, "error": str(e)}


async def check_urls(urls: List[str]) -> List[Dict[str, Any]]:
    """Check all URLs concurrently, sharing a single client session."""
    async with aiohttp.ClientSession() as session:
        tasks = [check_url(session, url) for url in urls]
        return await asyncio.gather(*tasks)


if __name__ == "__main__":
    urls = [
        "https://google.com",
        "https://github.com",
        "https://nonexistent.example.com",
    ]
    results = asyncio.run(check_urls(urls))
    for result in results:
        print(f"URL: {result['url']}, Status: {result['status']}, Error: {result['error']}")
The only third-party dependency is aiohttp (asyncio ships with the standard library); install it with:

    pip install aiohttp

Save the code to a file (e.g., url_checker.py), replace the urls list with your target URLs, and run:

    python url_checker.py
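Running the script against the sample list prints one line per URL, roughly like the following (the actual status codes and error messages will vary with network conditions and redirects):

    URL: https://google.com, Status: 200, Error: None
    URL: https://github.com, Status: 200, Error: None
    URL: https://nonexistent.example.com, Status: None, Error: Cannot connect to host nonexistent.example.com:443 ...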
This snippet avoids disk/file operations, GUIs, and niche libraries, focusing on a scalable networking task with modern Python.
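For very large URL lists you may also want to cap how many requests are in flight at once. The sketch below is an illustrative extension, not part of the snippet above: it reuses check_url and bounds concurrency with asyncio.Semaphore, where max_concurrency is an assumed tuning parameter.

import asyncio
from typing import Any, Dict, List

import aiohttp


async def check_urls_bounded(urls: List[str], max_concurrency: int = 20) -> List[Dict[str, Any]]:
    """Like check_urls, but never allows more than max_concurrency requests in flight."""
    semaphore = asyncio.Semaphore(max_concurrency)  # assumed limit; tune per workload

    async def bounded_check(session: aiohttp.ClientSession, url: str) -> Dict[str, Any]:
        # Waits here once max_concurrency checks are already running.
        async with semaphore:
            return await check_url(session, url)  # check_url is defined in the snippet above

    async with aiohttp.ClientSession() as session:
        return await asyncio.gather(*(bounded_check(session, url) for url in urls))

A call such as asyncio.run(check_urls_bounded(urls, max_concurrency=50)) behaves like check_urls but keeps the number of simultaneous connections predictable.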