That is quite an interesting problem you have gotten yourself into.
The problem with caching file handles is that a large number of open handles can drain system resources, making both the program and Windows perform badly.
If the number of devices in your database is not too high (fewer than 100), I think it would be safe to cache the handles.
Alternatively, you could cache a batch of records (say, one million), distribute them among the different devices, write some of them out, and then read in more records.
You could place the records in a Dictionary like this:
/// <summary>
/// Immutable record of a single punch event: the record identifier and the
/// time at which the punch occurred.
/// </summary>
class PunchInfo
{
    public PunchInfo(DateTime time, int id)
    {
        Id = id;
        Time = time;
    }

    // readonly: instances are only ever created and read in this design,
    // never mutated, so make that contract explicit.

    /// <summary>Timestamp of the punch.</summary>
    public readonly DateTime Time;

    /// <summary>Identifier of the punch record.</summary>
    public readonly int Id;
}
// In-memory buffer of punch records, grouped by device name.
// BUG FIX: the original declared the dictionary but never initialized it,
// so the first call to SaveRecord would throw a NullReferenceException.
Dictionary<string, List<PunchInfo>> Devices = new Dictionary<string, List<PunchInfo>>();

// Total number of records currently buffered across all devices.
int Count = 0;

// Flush trigger: start writing devices out once this many records are buffered.
const int Limit = 1000000;

// Flush target: stop writing once the buffered count drops below 90% of Limit.
const int LowerLimit = 90 * Limit / 100;
/// <summary>
/// Buffers one punch record in memory under the given device. When the total
/// buffered count reaches <c>Limit</c>, whole devices are flushed to disk
/// (via <c>SaveDevices</c>) until the count drops below <c>LowerLimit</c>.
/// </summary>
/// <param name="device">Name of the device the record came from.</param>
/// <param name="id">Identifier of the punch record.</param>
/// <param name="time">Timestamp of the punch.</param>
void SaveRecord(string device, int id, DateTime time)
{
    PunchInfo info = new PunchInfo(time, id);
    List<PunchInfo> list;
    if (!Devices.TryGetValue(device, out list))
    {
        list = new List<PunchInfo>();
        Devices.Add(device, list);
    }
    list.Add(info);
    Count++;

    if (Count >= Limit)
    {
        // Collect devices to flush (in dictionary enumeration order) until
        // the remaining buffered count falls below the lower limit.
        List<string> writeDevices = new List<string>();
        foreach (KeyValuePair<string, List<PunchInfo>> item in Devices)
        {
            writeDevices.Add(item.Key);
            Count -= item.Value.Count;
            if (Count < LowerLimit) break;
        }

        // BUG FIX: the original loop variable was also named 'device', which
        // shadows the method parameter and is a compile error in C# (CS0136).
        foreach (string flushDevice in writeDevices)
        {
            List<PunchInfo> records = Devices[flushDevice];
            Devices.Remove(flushDevice);
            SaveDevices(flushDevice, records);
        }
    }
}
/// <summary>
/// Flushes every buffered device to disk (via <c>SaveDevices</c>) and resets
/// the in-memory buffer. Call this on shutdown so no records are lost.
/// </summary>
void SaveAllDevices()
{
    foreach (KeyValuePair<string, List<PunchInfo>> item in Devices)
        SaveDevices(item.Key, item.Value);
    Devices.Clear();

    // BUG FIX: the original cleared the dictionary but left Count at its old
    // value, so the next buffered record could trigger a premature flush.
    Count = 0;
}
This way you avoid repeatedly opening and closing files, as well as keeping a large number of files open at once.
One million records take up roughly 20 MB of memory; you could easily raise that to 10 million records without problems.