0

Actually the docker python sdk is working fine: https://docker-py.readthedocs.io/en/stable/client.html

But I tried to perform docker exec with asyncio package simultaneously. It seems not to be possible? How could jenkins do it?

asyncio code:

import asyncio

async def factorial(name, number):
  print(number)
  await asyncio.sleep(number)
  print(name)

async def main():
    # Schedule three calls *concurrently*:
    L = await asyncio.gather(
        factorial("A", 2),
        factorial("B", 3),
        factorial("C", 4),
    )
    print(L)

asyncio.run(main())

and now the docker code:

async def gogo():
  client = docker.DockerClient(base_url='unix://var/run/docker.sock')
  container = client.containers.create(image_parsed, detach=True, stdin_open=True, tty=True, entrypoint="bash")
  container.start()
  res = container.exec_run(cmd='bash -c "echo hello stdout ; sleep 3s; echo hello stderr >&2; ls -a"', stream=True, demux=False)
  #container.wait()
  while True:
    try:
      print(next(res.output))
    except:
      break

  container.stop()
  container.remove()

async def gogo_group():

  print(f"started at {time.strftime('%X')}")

  L = await asyncio.gather(
      gogo(),
      gogo()
  )
  print(L)

  print(f"finished at {time.strftime('%X')}")


asyncio.run(gogo_group())

You can observe the asyncio code to be executed simultaneously, but the docker code is executed sequentially. Any idea how to solve this?

Due to Paul Cornelius comment I changed code, but it doesnt help:

async def async_wrap(container):
  return container.exec_run(cmd='bash -c "echo hello stdout ; sleep 3s; echo hello stderr >&2; ls -a"', stream=True, demux=False)

async def gogo():
  client = docker.DockerClient(base_url='unix://var/run/docker.sock')
  container = client.containers.create(image_parsed, detach=True, stdin_open=True, tty=True, entrypoint="bash")
  container.start()
  res = await async_wrap(container)
  #container.wait()
  while True:
    try:
      print(next(res.output))
    except:
      break

  container.stop()
  container.remove()

async def gogo_group():

  print(f"started at {time.strftime('%X')}")

  L = await asyncio.gather(
      gogo(),
      gogo()
  )
  print(L)

  print(f"finished at {time.strftime('%X')}")
Nikolai Ehrhardt
  • 570
  • 4
  • 14

1 Answers1

0

Multiprocessing is doing it concurrently:

import multiprocessing
import time
import os

import docker, json

#res = container.exec_run(cmd='bash -c "echo hello stdout ; echo hello stderr >&2"', stream=True, demux=True)

if __name__ == '__main__':

    def async_wrap(container):
      res = container.exec_run(cmd='bash -c "echo hello stdout ; sleep 3s; echo hello stderr >&2; ls -a"', stream=True, demux=False)
      while True:
        try:
          print(next(res.output))
        except:
          break

    client = docker.DockerClient(base_url='unix://var/run/docker.sock')
    container = client.containers.create(image_parsed, detach=True, stdin_open=True, tty=True, entrypoint="bash")
    container.start()


    print(f"started at {time.strftime('%X')}")
    p = multiprocessing.Process(target=async_wrap, args=(container,))
    p.start()

    h = multiprocessing.Process(target=async_wrap, args=(container,))
    h.start()
    h.join()
    p.join()
    print(f"finished at {time.strftime('%X')}")

    container.stop()
    container.remove()
Nikolai Ehrhardt
  • 570
  • 4
  • 14