I'm writing a console app to compress a directory of large files (around 30 files, each about 100-300 MB). It runs once per day as new files come in. I've tried the built-in GZipStream class and it took about 15 seconds per file with a compression ratio of about 0.212 (compressed size / original size). Is there a more efficient option in a third-party library, or some way to improve the compression ratio? Finally, is threading an option to speed this process up?
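One thing I was planning to test for the ratio is the GZipStream constructor that takes a CompressionLevel (added in .NET 4.5, if I remember right, same System.IO.Compression namespace). Something along these lines, though I haven't measured whether Optimal actually beats the default on files this size:

// Untested idea: same shape as my CompressFile method below, but asking for the best ratio.
// CompressionLevel.Optimal favours size over speed; CompressionLevel.Fastest is the opposite.
private void CompressFileOptimal(FileInfo fileInfo)
{
    using (FileStream inFile = fileInfo.OpenRead())
    using (FileStream outFile = File.Create(fileInfo.FullName + ".gz"))
    using (var gzip = new GZipStream(outFile, CompressionLevel.Optimal))
    {
        inFile.CopyTo(gzip);
    }
}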
Here's the code I'm currently using (it's basically from the MSDN article on GZipStream):
private void CompressFile(FileInfo fileInfo)
{
    // Get the stream of the source file.
    using (FileStream inFile = fileInfo.OpenRead())
    {
        Timer.Reset();

        // Skip hidden files and files that are already compressed.
        if ((File.GetAttributes(fileInfo.FullName) & FileAttributes.Hidden) != FileAttributes.Hidden
            && fileInfo.Extension != ".gz")
        {
            // Create the compressed file.
            using (FileStream outFile = File.Create(fileInfo.FullName + ".gz"))
            using (GZipStream compress = new GZipStream(outFile, CompressionMode.Compress))
            {
                // Copy the source file into the compression stream.
                Timer.Start();
                inFile.CopyTo(compress);
                Timer.Stop();

                Console.WriteLine("Compressed {0} from {1} to {2} bytes in {3} seconds.",
                    fileInfo.Name, fileInfo.Length, outFile.Length,
                    (double)Timer.ElapsedMilliseconds / 1000);
            }
        }
    }
}
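For the threading question, this is roughly what I had in mind: compress several files at once with Parallel.ForEach and a capped degree of parallelism. It's only a sketch (sourceDirectory is a placeholder, and I'd move the shared Timer into CompressFile as a local Stopwatch so the parallel calls don't step on each other); I also don't know whether the disk rather than the CPU would end up being the bottleneck:

// Sketch only: parallelize the per-file work (needs System.Linq and System.Threading.Tasks).
// The cap of 4 is arbitrary; disk throughput may be the real limit rather than CPU.
var files = new DirectoryInfo(sourceDirectory) // sourceDirectory is a placeholder path
    .GetFiles()
    .Where(f => (f.Attributes & FileAttributes.Hidden) != FileAttributes.Hidden
             && f.Extension != ".gz");

Parallel.ForEach(
    files,
    new ParallelOptions { MaxDegreeOfParallelism = 4 },
    file => CompressFile(file));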
Thanks!