Question

I have created a query to generate test data in a SQL Server database, but generating 1 GB of data takes about 45 minutes. How can I improve the performance of the data generation?

-- Row-by-row ("RBAR") data generator: 3,000,000 single-row INSERTs in a loop.
-- This is the slow approach the answers below replace with set-based code.
DECLARE @RowCount INT
DECLARE @RowString VARCHAR(10)
DECLARE @Random INT
DECLARE @Upper INT
DECLARE @Lower INT
DECLARE @InsertDate DATETIME

-- Random day offset range: between 730 and 1 days in the past
-- (i.e. a date somewhere within the last two years).
SET @Lower = -730
SET @Upper = -1
SET @RowCount = 0

WHILE @RowCount < 3000000
BEGIN
 SET @RowString = CAST(@RowCount AS VARCHAR(10))
 -- One RAND() call per iteration; result rounded to a whole-day offset.
 SELECT @Random = ROUND(((@Upper - @Lower -1) * RAND() + @Lower), 0)
 SET @InsertDate = DATEADD(dd, @Random, GETDATE())

 -- Each INSERT is its own implicit transaction -> one log flush per row,
 -- which is the dominant cost of this loop.
 INSERT INTO Table_1
  (q
  ,w
  ,e
  ,r
  ,t
  ,y)
 VALUES
  -- Left-pad the counter with zeros to a fixed width of 10 characters.
  (REPLICATE('0', 10 - DATALENGTH(@RowString)) + @RowString
  , @InsertDate
  ,DATEADD(dd, 1, @InsertDate)
  ,DATEADD(dd, 2, @InsertDate)
  ,DATEADD(dd, 3, @InsertDate)
  ,DATEADD(dd, 4, @InsertDate))

 SET @RowCount = @RowCount + 1
END
Was it helpful?

Solution

You may try following also:

-- Set-based rewrite: one INSERT of 3,000,000 rows instead of 3,000,000
-- single-row INSERTs. Declared here so the snippet runs standalone.
DECLARE @Lower INT = -730;  -- oldest day offset (2 years back)
DECLARE @Upper INT = -1;    -- newest day offset (yesterday)

;WITH seq AS (
    -- 0-based number sequence; the cross join of two large catalog views
    -- guarantees far more than 3M candidate rows to draw TOP (n) from.
    -- ORDER BY @@SPID is a constant: it satisfies ROW_NUMBER's syntax
    -- without imposing any real (costly) sort order.
    SELECT TOP (3000000) N = ROW_NUMBER() OVER (ORDER BY @@SPID) - 1
    FROM sys.all_columns c1
    CROSS JOIN sys.all_columns c2
)
INSERT INTO Table_1 (q, w, e, r, t, y)
SELECT
    RIGHT('0000000000' + CAST(N AS VARCHAR(10)), 10)  -- zero-pad to 10 chars
    , p.InsertDate
    , DATEADD(dd, 1, p.InsertDate)
    , DATEADD(dd, 2, p.InsertDate)
    , DATEADD(dd, 3, p.InsertDate)
    , DATEADD(dd, 4, p.InsertDate)
FROM seq
    -- RAND(CHECKSUM(NEWID())) reseeds per row; a bare RAND() is evaluated
    -- once per statement and would give every row the same date.
    CROSS APPLY (
        SELECT DATEADD(dd, ROUND(((@Upper - @Lower - 1) * RAND(CHECKSUM(NEWID())) + @Lower), 0), GETDATE())
    ) p(InsertDate);

OTHER TIPS

The problem is you are generating and inserting the data one row at a time. SQL Server is not designed to work that way. You need to find a set-based solution. This worked for me in under 30 seconds:

CREATE TABLE #Table_1 (
    Id INT IDENTITY(1,1)
    -- Computed column: Id left-padded with zeros to a fixed 10 characters.
    -- Explicit VARCHAR(10): an unspecified length silently defaults to 30
    -- in CAST and to 1 in declarations, which is a classic T-SQL trap.
    , RowString AS REPLICATE('0', 10 - LEN(CAST(Id AS VARCHAR(10)))) + CAST(Id AS VARCHAR(10))
    , Date1 DATETIME
);

DECLARE @Upper INT = -1;    -- newest day offset
DECLARE @Lower INT = -730;  -- oldest day offset (2 years back)

-- One set-based insert: 2001 x 2001 cross join of the spt_values number
-- table yields > 4M candidate rows, from which TOP (3000000) are taken.
-- RAND(CHECKSUM(NEWID())) reseeds per row so each row gets its own date.
INSERT  #Table_1 (Date1)
SELECT  TOP (3000000)
        DATEADD(dd, ROUND(((@Upper - @Lower - 1) * RAND(CHECKSUM(NEWID())) + @Lower), 0), GETDATE())
FROM    (   SELECT  number
            FROM    master..spt_values
            WHERE   TYPE = 'P' AND number <= 2000
        ) a (Number)
CROSS JOIN
        (   SELECT  number
            FROM    master..spt_values
            WHERE   TYPE = 'P' AND number <= 2000
        ) b (Number);

Once you have the above data in the #Table_1 temp table, it is a simple matter to insert it into Table_1:

-- Copy the staged rows into the real table.
-- DATEADD instead of bare "Date1 + n": integer arithmetic only works on
-- the legacy DATETIME type and fails on DATE/DATETIME2; DATEADD is also
-- what every other statement in this script uses.
INSERT Table_1 (q, w, e, r, t, y)
SELECT  RowString
        , Date1
        , DATEADD(dd, 1, Date1)
        , DATEADD(dd, 2, Date1)
        , DATEADD(dd, 3, Date1)
        , DATEADD(dd, 4, Date1)
FROM    #Table_1;

The biggest bottleneck in your SQL is logging: each single-row INSERT statement generates its own records in the transaction log and forces a log flush per row.

Although table variables are normally used for small to medium data volumes, they can help here: they live in tempdb, where their modifications are only minimally logged, and they are not affected by user-transaction rollbacks. (Note that they are not entirely free of logging or locking, as is sometimes claimed.)

Here's the appropriate sample code:

--First declare the table variable
-- First declare the table variable that will stage the generated rows.
DECLARE @TempTable TABLE
(
    q VARCHAR(10),
    w DATETIME,
    e DATETIME,
    r DATETIME,
    t DATETIME,
    y DATETIME
)

...

WHILE @RowCount < 3000000
BEGIN
    ...

    -- Insert each row into the table variable; modifications to a table
    -- variable are only minimally logged in tempdb.
    INSERT INTO @TempTable
    (q
    ,w
    ,e
    ,r
    ,t
    ,y)
    VALUES
    (REPLICATE('0', 10 - DATALENGTH(@RowString)) + @RowString
    , @InsertDate
    ,DATEADD(dd, 1, @InsertDate)
    ,DATEADD(dd, 2, @InsertDate)
    ,DATEADD(dd, 3, @InsertDate)
    ,DATEADD(dd, 4, @InsertDate))

    ...
END

-- Bulk-copy the staged data into the target table in a single statement.
-- Explicit column lists on both sides (instead of SELECT * into an
-- unlisted INSERT) keep this correct if either table's schema changes.
-- TABLOCK allows minimal logging under the simple/bulk-logged recovery models.
INSERT INTO Table_1 WITH(TABLOCK)
    (q, w, e, r, t, y)
SELECT q, w, e, r, t, y
FROM @TempTable
Licensed under: CC-BY-SA with attribution
Not affiliated with StackOverflow
scroll top