Here's some code for you to look at.
This starts with recognizing that the file is already a collection of single elements, so we can do the first grouping/sorting while reading the file. Since fixed-size arrays are impractical while the element count is still unknown, I used Lists and converted the result to int[][] at the end:
static int[][] makearrays(string filename)
{
    // Needs System.Collections.Generic, System.IO and System.Linq.
    List<List<int>> outval = new List<List<int>>();
    using(StreamReader sr = new StreamReader(filename))
    {
        while(!sr.EndOfStream)
        {
            int a = int.Parse(sr.ReadLine());
            if(sr.EndOfStream)
            {
                // Odd number of values: keep the last one as a single-element run.
                outval.Add(new List<int>() { a });
                break;
            }
            int b = int.Parse(sr.ReadLine());
            // Store every pair already sorted, so the merge passes start from sorted runs.
            if(a > b)
                outval.Add(new List<int>() { b, a });
            else
                outval.Add(new List<int>() { a, b });
        }
    }
    return outval.Select(x => x.ToArray()).ToArray();
}
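As a quick illustration (the file name and contents here are a made-up example), a file holding the values 5, 2, 9, 4 and 7, one per line, comes back as three pre-sorted runs:

    int[][] runs = makearrays("numbers.txt"); // hypothetical file containing 5, 2, 9, 4, 7
    // runs is now { {2, 5}, {4, 9}, {7} }: each pair sorted, the leftover value kept as a singleton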
With this array we can do the rest of the grouping/sorting. The method below uses recursion, but the memory footprint stays small: each pass writes the merged runs back into the front of the input array, so only the run currently being merged needs a fresh allocation:
static int[][] dosort(int[][] input)
{
    // <= 1 (rather than == 1) also handles an empty file, which would otherwise recurse forever.
    if(input.Length <= 1)
        return input;
    int i = 1, m = 0;
    for(; i < input.Length; i += 2)
    {
        // Merge the two adjacent runs input[i - 1] and input[i] into temp.
        int[] temp = new int[input[i].Length + input[i - 1].Length];
        int j = 0, k = 0, l = 0;
        while(j < input[i].Length && k < input[i - 1].Length)
        {
            if(input[i][j] < input[i - 1][k])
                temp[l++] = input[i][j++];
            else
                temp[l++] = input[i - 1][k++];
        }
        // Copy the tail of whichever run was not exhausted.
        while(l < temp.Length)
        {
            if(j < input[i].Length)
                temp[l++] = input[i][j++];
            if(k < input[i - 1].Length)
                temp[l++] = input[i - 1][k++];
        }
        // Reuse the front of the input array to hold the merged runs.
        input[m++] = temp;
    }
    // With an odd run count, the last run has no partner; carry it into the next pass.
    if(input.Length % 2 == 1)
        input[m++] = input.Last();
    input = input.Take(m).ToArray();
    return dosort(input);
}
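Continuing the small example from above: the first pass turns { {2, 5}, {4, 9}, {7} } into { {2, 4, 5, 9}, {7} }, and the recursive call merges that into the final { {2, 4, 5, 7, 9} }.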
In my tests, the 100,000-element file was sorted in under a quarter of a second, including the time to read it into memory.
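If you want to reproduce the timing, a minimal harness along these lines should do; this is just a sketch that assumes makearrays and dosort sit in the same class as Main, and the file name is made up:

    using System;
    using System.Diagnostics;

    static void Main()
    {
        Stopwatch sw = Stopwatch.StartNew();
        int[][] result = dosort(makearrays("100000.txt")); // hypothetical 100,000-line input
        sw.Stop();
        // After the last pass, result[0] holds the single fully sorted array.
        Console.WriteLine($"Sorted {result[0].Length} elements in {sw.ElapsedMilliseconds} ms");
    }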