In SQL Server, I'm trying to compare insert performance between two table structures with different primary key types (UNIQUEIDENTIFIER vs. INT IDENTITY). Does it matter whether I use a table variable for this testing, or should I use a temporary table? Or do I need to go to the trouble of actually creating the tables and indexes?
Specifically, I'm currently using the following script:
DECLARE @uniqueidentifierTest TABLE
(
    --yes, this is terrible, but I am looking for numbers on how bad this is :)
    tblIndex UNIQUEIDENTIFIER PRIMARY KEY CLUSTERED,
    foo INT,
    blah VARCHAR(100)
)
DECLARE @intTest TABLE
(
    tblIndex INT IDENTITY(1,1) PRIMARY KEY CLUSTERED,
    foo INT,
    blah VARCHAR(100)
)
DECLARE @iterations INT = 250000
DECLARE @ctrl INT = 1
DECLARE @guidKey UNIQUEIDENTIFIER
DECLARE @intKey INT
DECLARE @foo INT = 1234
DECLARE @blah VARCHAR(100) = 'asdfjifsdj fds89fsdio23r'
SET NOCOUNT ON
--test uniqueidentifier pk inserts
PRINT 'begin uniqueidentifier insert test at ' + CONVERT(VARCHAR(50), GETDATE(), 109)
WHILE @ctrl <= @iterations
BEGIN
    SET @guidKey = NEWID()
    INSERT INTO @uniqueidentifierTest (tblIndex, foo, blah)
    VALUES (@guidKey, @foo, @blah)
    SET @ctrl = @ctrl + 1
END
PRINT 'end uniqueidentifier insert test at ' + CONVERT(VARCHAR(50), GETDATE(), 109)
SET @ctrl = 1
--test int pk inserts
PRINT 'begin int insert test at ' + CONVERT(VARCHAR(50), GETDATE(), 109)
WHILE @ctrl <= @iterations
BEGIN
    INSERT INTO @intTest (foo, blah)
    VALUES (@foo, @blah)
    SET @ctrl = @ctrl + 1
END
PRINT 'end int insert test at ' + CONVERT(VARCHAR(50), GETDATE(), 109)
SET NOCOUNT OFF
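
For reference, if a temporary table turns out to be the better vehicle for this kind of test, I assume the uniqueidentifier half would look roughly like this (just a sketch, mirroring the same columns and loop as above, with a #-prefixed table that gets dropped afterwards):

CREATE TABLE #uniqueidentifierTest
(
    tblIndex UNIQUEIDENTIFIER PRIMARY KEY CLUSTERED,
    foo INT,
    blah VARCHAR(100)
)
DECLARE @iterations INT = 250000
DECLARE @ctrl INT = 1
DECLARE @foo INT = 1234
DECLARE @blah VARCHAR(100) = 'asdfjifsdj fds89fsdio23r'
SET NOCOUNT ON
PRINT 'begin uniqueidentifier insert test at ' + CONVERT(VARCHAR(50), GETDATE(), 109)
WHILE @ctrl <= @iterations
BEGIN
    --NEWID() generates the random key inline instead of via a variable
    INSERT INTO #uniqueidentifierTest (tblIndex, foo, blah)
    VALUES (NEWID(), @foo, @blah)
    SET @ctrl = @ctrl + 1
END
PRINT 'end uniqueidentifier insert test at ' + CONVERT(VARCHAR(50), GETDATE(), 109)
SET NOCOUNT OFF
DROP TABLE #uniqueidentifierTest

The int identity half would change the same way, so I haven't repeated it here.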