I wrote a script which creates an animation (movie) from FITS files. Each file is 2.8 MB and there are 9000 of them. Here is the code:
import numpy as np
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import os
import pyfits
import glob
import re
Writer = animation.writers['ffmpeg']
writer = Writer(fps=15, metadata=dict(artist='Me'), bitrate=1800)
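# natural sort: split file names into text and number chunks so e.g. frame2 sorts before frame10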
numbers = re.compile(r'(\d+)')
def numericalSort(value):
    parts = numbers.split(value)
    parts[1::2] = map(int, parts[1::2])
    return parts
image_list=glob.glob('/kalib/*.fits')
image_list= sorted(image_list,key=numericalSort)
print(image_list)
fig = plt.figure("movie")
img = []
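# read every FITS file up front and keep one imshow artist per frame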
for i in range(0, len(image_list)):
    hdulist = pyfits.open(image_list[i])
    im = hdulist[0].data
    img.append([plt.imshow(im, cmap=plt.cm.Greys_r)])
ani = animation.ArtistAnimation(fig,img, interval=20, blit=True,repeat_delay=0)
ani.save('movie.mp4', writer=writer)
I think the problem is the img[] list I build up: I have 8 GB of RAM, and when it fills up the operating system terminates the Python script.
My question is: how can I read 9000 files and still create the animation? Is it possible to use some kind of buffer, or parallel processing? Any suggestions?
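For example, would something along these lines work? This is only a rough, untested sketch of what I mean by streaming the frames one at a time instead of keeping all 9000 in memory (it reuses the same image_list and writer as above, and assumes FuncAnimation only needs one file loaded per frame):

fig = plt.figure("movie")
first = pyfits.getdata(image_list[0])          # data of the primary HDU of the first file
im = plt.imshow(first, cmap=plt.cm.Greys_r)

def update(i):
    # load one FITS file per frame and reuse the same AxesImage
    data = pyfits.getdata(image_list[i])
    im.set_data(data)
    im.autoscale()   # rescale grey levels to each frame; drop this to keep the first frame's scale
    return [im]

ani = animation.FuncAnimation(fig, update, frames=len(image_list), interval=20, blit=True)
ani.save('movie.mp4', writer=writer)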