Friday, August 9, 2019

SQL Server: Table Size and Total Index Size Occupied


Complementing an earlier post, Consulta Index (Size, Unused, Clustered, NoClustered) (https://luizflsilva.blogspot.com/2019/06/consulta-index-size-unused-clustered.html).

The query below reports the size of each table in KB, MB, and GB, along with a per-table subtotal that includes the space occupied by the table's indexes.




-- Drop any leftover temp table from a previous run
IF OBJECT_ID('tempdb..#SpaceUsed') IS NOT NULL DROP TABLE #SpaceUsed

-- Staging table matching the sp_spaceused result set, plus numeric KB columns
CREATE TABLE #SpaceUsed 
(
 TableName sysname ,
 [Rows] bigint ,
 [Reserved] varchar(20),
 [Data] varchar(20),
 [Index_Size] varchar(20),
 [Unused] varchar(20),
 [Reserved_KB] bigint,
 [Data_KB] bigint,
 [Index_Size_KB] bigint,
 [Unused_KB] bigint
)

-- Build one sp_spaceused call per base table as a single dynamic batch
DECLARE @CMD NVARCHAR(MAX) = ''
SELECT @CMD += 'EXEC sp_spaceused ' + '''' + QUOTENAME(TABLE_SCHEMA) + '.' + QUOTENAME(TABLE_NAME) + '''' + ';' + CHAR(10)
FROM INFORMATION_SCHEMA.TABLES 
WHERE TABLE_TYPE = 'BASE TABLE'

PRINT @CMD -- for inspection only; PRINT truncates long strings

-- Run the batch and capture the sp_spaceused output for every base table
INSERT INTO #SpaceUsed (TableName ,[Rows] , [Reserved], [Data] , [Index_Size] , [Unused] )
EXEC sp_executesql @CMD



-- Strip the ' KB' suffix and store the values as numeric KB
UPDATE #SpaceUsed 
SET [Reserved_KB] = CONVERT(BIGINT,RTRIM(LTRIM(REPLACE([Reserved] , ' KB', '')))),
    [Data_KB] = CONVERT(BIGINT,RTRIM(LTRIM(REPLACE([Data] , ' KB', '')))),
    [Index_Size_KB]= CONVERT(BIGINT,RTRIM(LTRIM(REPLACE([Index_Size] , ' KB', '')))),
    [Unused_KB]= CONVERT(BIGINT,RTRIM(LTRIM(REPLACE([Unused] , ' KB', ''))))


-- Per-table sizes, largest data size first
SELECT 
 TableName, 
 [Rows], 
 Reserved_KB , 
 Data_KB , 
 Index_Size_KB, 
 Unused_KB,  
 ROUND(CAST(Data_KB / 1024.0 AS FLOAT),2) Data_MB,
 ROUND(CAST(Data_KB / 1024.0 / 1024.0 AS FLOAT),2) Data_GB
FROM #SpaceUsed
ORDER BY Data_KB DESC 

-- Database-wide totals across all base tables
SELECT 
 SUM(Reserved_KB) Reserved_KB , 
 SUM(Data_KB) Data_KB,
 ROUND(CAST(SUM(Index_Size_KB) / 1024.0 / 1024.0 AS FLOAT),2) Index_Size_GB, 
 SUM(Unused_KB) Unused_KB ,
 ROUND(CAST(SUM(Data_KB) / 1024.0 AS FLOAT),2) Data_MB , 
 ROUND(CAST(SUM(Data_KB) / 1024.0 / 1024.0 AS FLOAT),2) Data_GB
FROM #SpaceUsed

DROP TABLE #SpaceUsed
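
For comparison, here is a minimal sketch of an alternative that derives similar per-table totals from sys.dm_db_partition_stats instead of running sp_spaceused for every table. It assumes VIEW DATABASE STATE permission; page counts are multiplied by 8 because SQL Server pages are 8 KB, and the column aliases are illustrative, not part of the script above.

-- Alternative sketch: per-table reserved/used space from the partition-stats DMV
SELECT
 s.name + '.' + t.name AS TableName,
 SUM(CASE WHEN ps.index_id IN (0, 1) THEN ps.row_count ELSE 0 END) AS [Rows],
 SUM(ps.reserved_page_count) * 8 AS Reserved_KB,
 SUM(ps.used_page_count) * 8 AS Used_KB,
 ROUND(SUM(ps.reserved_page_count) * 8 / 1024.0, 2) AS Reserved_MB,
 ROUND(SUM(ps.reserved_page_count) * 8 / 1024.0 / 1024.0, 2) AS Reserved_GB
FROM sys.dm_db_partition_stats AS ps
JOIN sys.tables AS t ON t.object_id = ps.object_id
JOIN sys.schemas AS s ON s.schema_id = t.schema_id
GROUP BY s.name, t.name
ORDER BY Reserved_KB DESC;

This version avoids the dynamic sp_spaceused batch and the ' KB' string parsing, at the cost of depending on DMV permissions.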
