I want to process clients in different processes asynchronously, that is, each process can process many client connections asynchronously. The code I have at the moment:
import os, socket
import asyncio
from multiprocessing import Process, Queue
async def process_client(client: socket) -> None:
loop = asyncio.get_event_loop()
data = await loop.sock_recv(client, 256)
def main_process(q: Queue) -> None:
loop = asyncio.get_event_loop()
while True:
try:
client, addr = q.get(timeout=1)
loop.create_task(process_client(client))
except:
pass
def main() -> None:
server_ip = os.environ['SERVER_IP']
server_port = int(os.environ['SEVER_PORT'])
q = Queue()
for _ in range(8):
worker = Process(target=main_process, args=(q, ))
worker.start()
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as server:
server.bind((server_ip, server_port))
server.listen(100)
while True:
client, addr = server.accept()
q.put((client, addr))
if __name__ == '__main__':
main()
create a task does not create a task because the while loop and q.get(timeout=1) block the thread. If the multiprocessing queue was asynchronous, then I would be able to do for example this: client, addr = await q.get()