I am trying to SFTP a file to a remote server in chunks, using threads and the Python paramiko library.
The script opens a local file and uploads chunks of it to the remote server in separate threads.
I am basically following this solution, which uses the same approach to download a large file over SFTP; I would like to send large files instead: Downloading solution
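Concretely, the idea is to describe the local file as a list of (offset, length) chunks, split that list across the threads, and have each thread upload its slice to its own remote part file, which the script then joins into one remote file at the end. For example (sizes picked purely for illustration):

filesize = 12288        # a 12 KiB local file, for illustration only
chunksize = pow(2, 12)  # 4096-byte blocks, same value as in the script below
chunks = [(offset, chunksize) for offset in range(0, filesize, chunksize)]
# chunks == [(0, 4096), (4096, 4096), (8192, 4096)]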
However, in write_chunks(), on the line
for chunk in infile.readv(chunks):
I am getting this error:
AttributeError: '_io.BufferedReader' object has no attribute 'readv'
Could anybody help with this error, please? I thought that infile was a file descriptor; I don't understand why it is an _io.BufferedReader object.
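From reading the paramiko docs, my current guess is that readv() is a method of paramiko's SFTPFile (the object the downloading solution gets back from sftp_client.open()), whereas the built-in open() I use for the local file returns an _io.BufferedReader, which has no readv(). If that is right, I assume I would need to emulate readv() for the local file with seek()/read(); a minimal sketch of what I have in mind (read_local_chunks is just a helper name I made up, not anything from paramiko):

def read_local_chunks(infile, chunks):
    """Hypothetical helper: yield one block per (offset, length) tuple from a
    local file object, mimicking what paramiko's SFTPFile.readv() does remotely."""
    for offset, length in chunks:
        infile.seek(offset)
        yield infile.read(length)

# inside write_chunks(), this would replace the failing line:
#     for chunk in read_local_chunks(infile, chunks):
#         outfile.write(chunk)

Is that the right way to think about it, or am I misusing readv() here?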
import threading, os, time, paramiko

MAX_RETRIES = 10

ftp_server = "server.com"
port = 22
remote_file = "/home/filecopy.bin"
local_file = "/home/file.bin"
ssh_conn = sftp_client = None
username = "none"
password = "none"

# you could make the number of threads relative to file size
NUM_THREADS = 2


def make_filepart_path(file_path, part_number):
    """creates filepart path from filepath"""
    return "%s.filepart.%s" % (file_path, part_number + 1)


def write_chunks(chunks, tnum, remote_file_part, username, password, ftp_server, max_retries):
    """uploads one thread's list of (offset, length) chunks to its own remote part file"""
    ssh_conn = sftp_client = None
    for retry in range(max_retries):
        try:
            ssh_conn = paramiko.Transport((ftp_server, port))
            ssh_conn.connect(username=username, password=password)
            sftp_client = paramiko.SFTPClient.from_transport(ssh_conn)
            with sftp_client.open(remote_file_part, "wb") as outfile:
                with open(local_file, "rb") as infile:
                    for chunk in infile.readv(chunks):  # <-- line raising the AttributeError
                        outfile.write(chunk)
            break
        except (EOFError, paramiko.ssh_exception.SSHException, OSError) as x:
            retry += 1
            print("%s %s Thread %s - > retrying %s..." % (type(x), x, tnum, retry))
            time.sleep(abs(retry) * 10)
        finally:
            if hasattr(sftp_client, "close") and callable(sftp_client.close):
                sftp_client.close()
            if hasattr(ssh_conn, "close") and callable(ssh_conn.close):
                ssh_conn.close()


start_time = time.time()

for retry in range(MAX_RETRIES):
    try:
        ssh_conn = paramiko.Transport((ftp_server, port))
        ssh_conn.connect(username=username, password=password)
        sftp_client = paramiko.SFTPClient.from_transport(ssh_conn)
        # connect to get the file's size in order to calculate chunks
        #filesize = sftp_client.stat(remote_file).st_size
        filesize = os.stat(local_file).st_size
        sftp_client.close()
        ssh_conn.close()
        chunksize = pow(2, 12)
        chunks = [(offset, chunksize) for offset in range(0, filesize, chunksize)]
        thread_chunk_size = (len(chunks) // NUM_THREADS) + 1
        # break the chunks into sub lists to hand off to threads
        thread_chunks = [chunks[i:i + thread_chunk_size] for i in range(0, len(chunks) - 1, thread_chunk_size)]
        threads = []
        fileparts = []
        for thread_num in range(len(thread_chunks)):
            remote_file_part = make_filepart_path(remote_file, thread_num)
            args = (thread_chunks[thread_num], thread_num, remote_file_part, username, password, ftp_server, MAX_RETRIES)
            threads.append(threading.Thread(target=write_chunks, args=args))
            fileparts.append(remote_file_part)
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
        # join file parts into one file, remove fileparts
        with sftp_client.open(remote_file_part, "wb") as outfile:
            for filepart in fileparts:
                with open(filepart, "rb") as infile:
                    outfile.write(infile.read())
                os.remove(filepart)
        break
    except (EOFError, paramiko.ssh_exception.SSHException, OSError) as x:
        retry += 1
        print("%s %s - > retrying %s..." % (type(x), x, retry))
        time.sleep(abs(retry) * 10)
    finally:
        if hasattr(sftp_client, "close") and callable(sftp_client.close):
            sftp_client.close()
        if hasattr(ssh_conn, "close") and callable(ssh_conn.close):
            ssh_conn.close()

print("Loading File %s Took %d seconds " % (local_file, time.time() - start_time))
Stack trace:
Exception in thread Thread-4:
Traceback (most recent call last):
File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
self.run()
File "/usr/lib/python3.6/threading.py", line 864, in run
self._target(*self._args, **self._kwargs)
File "simpleNNInference.py", line 210, in write_chunks
for chunk in infile.readv(chunks):
AttributeError: '_io.BufferedReader' object has no attribute 'readv'
Exception in thread Thread-3:
Traceback (most recent call last):
File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
self.run()
File "/usr/lib/python3.6/threading.py", line 864, in run
self._target(*self._args, **self._kwargs)
File "simpleNNInference.py", line 210, in write_chunks
for chunk in infile.readv(chunks):
AttributeError: '_io.BufferedReader' object has no attribute 'readv'