Reading all the lines into memory at once is inefficient for large files. The following solution streams the input line by line instead:
def read_write_batch(inpath, outpath, n):
    """Join every *n* lines of *inpath* with ':' and write one joined line per batch to *outpath*.

    Streams the input line by line, so memory use is O(n) regardless of
    file size. Each input line is stripped of surrounding whitespace
    (including its newline) before joining.

    Args:
        inpath: path of the text file to read.
        outpath: path of the output file (overwritten).
        n: number of input lines per output line; must be >= 1.
    """
    with open(inpath) as infile, open(outpath, 'w') as outfile:
        batch = []
        for line in infile:
            batch.append(line.strip())
            if len(batch) == n:
                outfile.write(':'.join(batch) + '\n')
                batch = []
        # Bug fix: the original dropped a trailing partial batch when the
        # line count was not a multiple of n. Flush whatever remains.
        if batch:
            outfile.write(':'.join(batch) + '\n')
if __name__ == '__main__':
    # Demo run: join the sample input four lines at a time.
    src, dst, batch_size = '/tmp/test.txt', '/tmp/out.txt', 4
    read_write_batch(src, dst, batch_size)