Question

I am sending a file to a SqlFileStream in parts, like this:

/// <summary>
/// Looks up the FILESTREAM path and transaction context for the row identified by
/// <paramref name="storageId"/> in FileStorage, then appends <paramref name="stream"/>
/// to that blob inside a single transaction.
/// </summary>
/// <param name="storageId">Primary key of the FileStorage row to write to.</param>
/// <param name="stream">Source data to append; consumed (and closed) by SaveFile.</param>
/// <exception cref="InvalidOperationException">No FileStorage row matches <paramref name="storageId"/>.</exception>
public void StreamFile(int storageId, Stream stream)
{
    // Parameterized query instead of string.Format: avoids SQL-injection-style
    // string building and lets SQL Server reuse the cached plan.
    const string select =
        @"SELECT TOP(1) Content.PathName(),
        GET_FILESTREAM_TRANSACTION_CONTEXT() FROM FileStorage WHERE FileStorageID = @storageId";

    using (var conn = new SqlConnection(this.ConnectionString))
    {
        conn.Open();

        // Transaction in a using block so it is disposed (and implicitly rolled back)
        // if SaveFile or the reader throws before Commit.
        using (var sqlTransaction = conn.BeginTransaction(IsolationLevel.ReadCommitted))
        {
            string serverPath;
            byte[] serverTxn;
            using (var cmd = new SqlCommand(select, conn, sqlTransaction))
            {
                cmd.Parameters.Add("@storageId", SqlDbType.Int).Value = storageId;
                using (SqlDataReader rdr = cmd.ExecuteReader())
                {
                    // Fail loudly on a missing row instead of dereferencing an empty reader.
                    if (!rdr.Read())
                    {
                        throw new InvalidOperationException(
                            string.Format("No FileStorage row found for FileStorageID={0}.", storageId));
                    }

                    serverPath = rdr.GetSqlString(0).Value;
                    serverTxn = rdr.GetSqlBinary(1).Value;
                    // No explicit rdr.Close(): the using block disposes the reader.
                }
            }

            this.SaveFile(stream, serverPath, serverTxn);
            sqlTransaction.Commit();
        }
    }
}

/// <summary>
/// Appends the entire contents of <paramref name="clientStream"/> to the FILESTREAM
/// blob identified by <paramref name="serverPath"/> under the transaction context
/// <paramref name="serverTxn"/>. The client stream is closed when the copy completes.
/// </summary>
/// <param name="clientStream">Source data; read to end and then closed.</param>
/// <param name="serverPath">UNC path returned by Content.PathName().</param>
/// <param name="serverTxn">Context from GET_FILESTREAM_TRANSACTION_CONTEXT().</param>
private void SaveFile(Stream clientStream, string serverPath, byte[] serverTxn)
{
    // 512-byte writes are needlessly chatty for FILESTREAM I/O; a 64 KB buffer
    // cuts the number of round trips without meaningful memory cost.
    const int BlockSize = 64 * 1024;

    // ReadWrite (not Write) is required so we can seek to the existing end of the
    // blob and append instead of replacing the content.
    using (var dest = new SqlFileStream(serverPath, serverTxn,
        FileAccess.ReadWrite, FileOptions.SequentialScan, 0))
    {
        // Position at the current end so this call appends.
        dest.Seek(0, SeekOrigin.End);

        // Stream.CopyTo replaces the manual read/write loop.
        clientStream.CopyTo(dest, BlockSize);
    }

    // NOTE(review): closing the caller's stream here is surprising ownership
    // transfer — kept for backward compatibility with existing callers.
    clientStream.Close();
}

I tried uploading a file in 10 parts. When I looked at the folder where the data is held, I saw 10 files: for each additional part, SqlFileStream creates a new file. The last file holds all the data, but the remaining files unnecessarily waste space. Is it possible to keep all the data in one file — that is, make the last operation an append?

Was it helpful?

Solution

SqlFileStream doesn't support "partial updates". Each time the content changes, a new file is written to disk. These extra files are de-referenced and will eventually be cleaned up, but until they are, they will affect your backups, logs, etc.

If you want all the data in a single file, you will need to process the entire file in a single stream.

If you don't need to support large files ( > 2 GB) and you expect lots of small partial updates, you might be better off storing the file in a VARBINARY(MAX) column instead.

Licensed under: CC-BY-SA with attribution
Not affiliated with StackOverflow
scroll top