For these benchmarks I used a scale — the size of the FileStream
buffer — to determine whether it made any significant difference. I ran each test 10 times and averaged, in Release (64-bit)
mode on .NET Framework 4.7.1.
I used a randomized buffer, new double[8, 2000000],
to generate the test data.
Results
Mode : Release (64Bit)
Test Framework : .NET Framework 4.7.1
Benchmarks Runs : 10 times (averaged)
Scale : 4,096, Test Data : Standard input
Value | Average | Fastest | StDv | Cycles | Pass | Gain |
------------------------------------------------------------------------------------------------
WriteFile Unsafe | 384.122 ms | 275.073 ms | 58.90 | 1,317,292,586 | Pass | 42.84 % |
BlockCopy | 389.389 ms | 305.094 ms | 57.68 | 1,335,451,612 | Pass | 42.05 % |
WriteFile Pinned | 422.704 ms | 341.646 ms | 67.66 | 1,418,871,963 | Pass | 37.09 % |
BinaryWriter | 671.966 ms | 608.900 ms | 58.63 | 2,260,807,206 | Base | 0.00 % |
BitConverter | 784.722 ms | 668.788 ms | 139.98 | 2,607,901,414 | Pass | -16.78 % |
Scale : 32,768, Test Data : Standard input
Value | Average | Fastest | StDv | Cycles | Pass | Gain |
----------------------------------------------------------------------------------------------
WriteFile Unsafe | 97.254 ms | 88.318 ms | 5.38 | 339,330,780 | Pass | 83.49 % |
WriteFile Pinned | 110.047 ms | 90.279 ms | 18.80 | 346,777,096 | Pass | 81.32 % |
BlockCopy | 115.805 ms | 106.119 ms | 7.40 | 403,209,891 | Pass | 80.34 % |
BinaryWriter | 589.168 ms | 530.255 ms | 60.64 | 1,985,585,629 | Base | 0.00 % |
BitConverter | 593.952 ms | 506.482 ms | 73.93 | 1,983,475,740 | Pass | -0.81 % |
Scale : 102,400, Test Data : Standard input
Value | Average | Fastest | StDv | Cycles | Pass | Gain |
----------------------------------------------------------------------------------------------
WriteFile Unsafe | 73.071 ms | 69.885 ms | 1.77 | 255,008,411 | Pass | 85.95 % |
WriteFile Pinned | 73.523 ms | 71.073 ms | 1.98 | 256,062,331 | Pass | 85.86 % |
BlockCopy | 82.068 ms | 78.838 ms | 1.79 | 286,872,838 | Pass | 84.22 % |
BinaryWriter | 519.943 ms | 471.578 ms | 46.01 | 1,778,713,946 | Base | 0.00 % |
BitConverter | 559.842 ms | 497.743 ms | 39.83 | 1,946,616,118 | Pass | -7.67 % |
Scale : 1,048,576, Test Data : Standard input
Value | Average | Fastest | StDv | Cycles | Pass | Gain |
-----------------------------------------------------------------------------------------------
WriteFile Pinned | 59.993 ms | 56.088 ms | 1.73 | 209,025,613 | Pass | 87.46 % |
WriteFile Unsafe | 61.783 ms | 56.266 ms | 8.09 | 206,988,059 | Pass | 87.08 % |
BlockCopy | 64.105 ms | 61.066 ms | 1.52 | 224,205,049 | Pass | 86.60 % |
BinaryWriter | 478.376 ms | 442.570 ms | 34.63 | 1,671,203,569 | Base | 0.00 % |
BitConverter | 550.557 ms | 493.186 ms | 42.27 | 1,916,031,041 | Pass | -15.09 % |
BlockCopy
// Writes the raw bytes of a 2-D double array to a file, copying it into a
// reusable byte scratch buffer one chunk at a time via Buffer.BlockCopy.
// The file is created (overwritten) with exclusive access.
private static void Write(double[,] ary, int chunkSize, string fileName)
{
    var byteCount = ary.GetLength(0) * ary.GetLength(1) * sizeof(double);
    using (var stream = new FileStream(fileName, FileMode.Create, FileAccess.Write, FileShare.None, chunkSize))
    {
        var scratch = new byte[chunkSize];
        var offset = 0;
        while (offset < byteCount)
        {
            // Last chunk may be shorter than the scratch buffer.
            var count = Math.Min(chunkSize, byteCount - offset);
            Buffer.BlockCopy(ary, offset, scratch, 0, count);
            stream.Write(scratch, 0, count);
            offset += count;
        }
    }
}
BinaryWriter
// Writes every element of a 2-D double array to a file in row-major order
// using BinaryWriter, which emits each double as 8 little-endian bytes.
private static void Write(double[,] ary, int chunkSize, string fileName)
{
    var rows = ary.GetLength(0);
    var cols = ary.GetLength(1);
    using (var fs = new FileStream(fileName, FileMode.Create, FileAccess.Write, FileShare.None, chunkSize))
    using (var writer = new BinaryWriter(fs))
    {
        for (var r = 0; r < rows; r++)
        {
            for (var c = 0; c < cols; c++)
            {
                writer.Write(ary[r, c]);
            }
        }
    }
}
WriteFile Pinned
// Writes the raw bytes of a 2-D double array to a file by pinning the array
// with a GCHandle and passing chunks of it directly to the Win32 WriteFile API.
// NOTE(review): assumes Kernel32.WriteFile has the standard Win32 signature
// (returns false on failure, reports bytes written via the out parameter) —
// confirm against the P/Invoke declaration.
private static unsafe void Write(double[,] ary, int chunkSize, string fileName)
{
    var totalDoubles = ary.GetLength(0) * ary.GetLength(1);
    var doublesPerChunk = chunkSize / sizeof(double);
    // BUG FIX: a chunkSize below sizeof(double) made the loop below spin forever.
    if (doublesPerChunk < 1)
        throw new ArgumentOutOfRangeException(nameof(chunkSize), "chunkSize must be at least sizeof(double).");
    using (var fs = new FileStream(fileName, FileMode.Create, FileAccess.Write, FileShare.None, chunkSize))
    {
        var handle = default(GCHandle);
        try
        {
            // Pin the array so the GC cannot move it while native code holds the pointer.
            handle = GCHandle.Alloc(ary, GCHandleType.Pinned);
            var p = (double*)handle.AddrOfPinnedObject().ToPointer();
            var fileHandle = fs.SafeFileHandle.DangerousGetHandle();
            for (var i = 0; i < totalDoubles; i += doublesPerChunk)
            {
                var count = Math.Min(doublesPerChunk, totalDoubles - i);
                var byteCount = count * sizeof(double);
                // BUG FIX: the original discarded WriteFile's result, so a failed
                // or partial write silently produced a corrupt file.
                if (!Kernel32.WriteFile(fileHandle, (IntPtr)(p + i), byteCount, out var written, IntPtr.Zero) ||
                    written != byteCount)
                {
                    throw new IOException($"WriteFile failed at byte offset {(long)i * sizeof(double)}.");
                }
            }
        }
        finally
        {
            if (handle.IsAllocated)
            {
                handle.Free();
            }
        }
    }
}
WriteFile Pinned unsafe
// Writes the raw bytes of a 2-D double array to a file by pinning the array
// with a fixed statement and passing chunks of it directly to the Win32
// WriteFile API.
// NOTE(review): assumes Kernel32.WriteFile has the standard Win32 signature
// (returns false on failure, reports bytes written via the out parameter) —
// confirm against the P/Invoke declaration.
private static unsafe void Write(double[,] ary, int chunkSize, string fileName)
{
    var totalDoubles = ary.GetLength(0) * ary.GetLength(1);
    var doublesPerChunk = chunkSize / sizeof(double);
    // BUG FIX: a chunkSize below sizeof(double) made the loop below spin forever.
    if (doublesPerChunk < 1)
        throw new ArgumentOutOfRangeException(nameof(chunkSize), "chunkSize must be at least sizeof(double).");
    using (var fs = new FileStream(fileName, FileMode.Create, FileAccess.Write, FileShare.None, chunkSize))
    {
        var fileHandle = fs.SafeFileHandle.DangerousGetHandle();
        fixed (double* p = ary)
        {
            for (var i = 0; i < totalDoubles; i += doublesPerChunk)
            {
                var count = Math.Min(doublesPerChunk, totalDoubles - i);
                var byteCount = count * sizeof(double);
                // BUG FIX: the original discarded WriteFile's result, so a failed
                // or partial write silently produced a corrupt file.
                if (!Kernel32.WriteFile(fileHandle, (IntPtr)(p + i), byteCount, out var written, IntPtr.Zero) ||
                    written != byteCount)
                {
                    throw new IOException($"WriteFile failed at byte offset {(long)i * sizeof(double)}.");
                }
            }
        }
    }
}
BitConverter
This just uses FileStream and BitConverter.GetBytes.
// Writes every element of a 2-D double array to a file in row-major order,
// converting each double to its 8-byte representation with
// BitConverter.GetBytes (allocates one small array per element).
private static void Write(double[,] ary, int chunkSize, string fileName)
{
    var rows = ary.GetLength(0);
    var cols = ary.GetLength(1);
    using (var fs = new FileStream(fileName, FileMode.Create, FileAccess.Write, FileShare.None, chunkSize))
    {
        for (var r = 0; r < rows; r++)
        {
            for (var c = 0; c < cols; c++)
            {
                var bytes = BitConverter.GetBytes(ary[r, c]);
                fs.Write(bytes, 0, bytes.Length);
            }
        }
    }
}
Summary
This was extremely fiddly to test and get right; however, all the solutions are tested, and they all write the whole array contiguously to the file as the raw bytes of each double.
At first xanatos's version using a pinned array seemed really slow (not shown here), and it took me a while to figure out what was actually going on. It turns out that writing the whole array to the file in a single call seems to be the slowest. It may be because the flushing isn't happening incrementally but only when the file closes — I'm not sure, but I suspect it tries to write it all at once.
However, when I tweaked this to write in chunks it turns out to be the most consistent. Once again this was really hard to test though, we are fighting against various caches that are not easy to overcome; in the end I had to write separate files and still it seems like the OS was caching results.
Update
If you want to read the data back you can use the following:
// Reads raw doubles (as written by the Write variants above) from a file back
// into the supplied 2-D array, one chunk at a time via Buffer.BlockCopy.
// Throws EndOfStreamException if the file is shorter than the array.
private static void Read(double[,] ary, int chunkSize, string fileName)
{
    var h = ary.GetLength(0);
    var w = ary.GetLength(1);
    var totalSize = h * w * sizeof(double);
    using (var fs = new FileStream(fileName, FileMode.Open, FileAccess.Read, FileShare.None, chunkSize))
    {
        var buffer = new byte[chunkSize];
        for (var i = 0; i < totalSize; i += chunkSize)
        {
            var size = Math.Min(chunkSize, totalSize - i);
            // BUG FIX: the original ignored Stream.Read's return value, but Read
            // may deliver fewer bytes than requested; loop until the chunk is full.
            var read = 0;
            while (read < size)
            {
                var n = fs.Read(buffer, read, size - read);
                if (n == 0)
                    throw new EndOfStreamException($"Unexpected end of file '{fileName}' at byte {i + read}.");
                read += n;
            }
            Buffer.BlockCopy(buffer, 0, ary, i, size);
        }
    }
}