-- Microsoft SQL Server

-- SQL_users_logins_sync_SIDs.sql

/* Run against the restored database */

SET NOCOUNT ON;

-- Compare database users against same-named server logins and, for every
-- pair whose SIDs differ, emit an ALTER USER ... WITH LOGIN statement that
-- remaps the user to the login's SID.
SELECT  sp.name AS LoginUserName,
        sp.sid  AS LoginSID,
        dp.sid  AS UserSID,
        'ALTER USER ' + QUOTENAME(dp.name) +
        ' WITH LOGIN = ' + QUOTENAME(sp.name) + ';' AS SyncCmd
FROM    sys.database_principals dp
        INNER JOIN sys.server_principals sp ON sp.name = dp.name
WHERE   sp.type = 'S' AND
        dp.sid <> sp.sid;

/*
    -- Example of query output
    -- Execute generated statements to sync Users with Logins
    ALTER USER [AppUser] WITH LOGIN = [AppUser];


    -- Update the scripts below with a User/Login name then run them to verify
    -- that the SIDs now match
    SELECT name, sid FROM sys.server_principals WHERE name = N'AppUser';
    SELECT name, sid FROM sys.database_principals WHERE name = N'AppUser';

*/

-- SQL_Users_orphaned.sql

/* Run against the restored database */

SET NOCOUNT ON;

-- Check for database users without parent logins on the server.
-- NOT EXISTS replaces NOT IN (subquery): it is null-safe and is the
-- idiomatic anti-semi-join form.
-- NOTE(review): matching is by name, consistent with the SID-sync script
-- above; strictly, an orphan is a user whose SID matches no login SID.
SELECT  dp.name, dp.type_desc, dp.create_date, dp.modify_date, dp.sid
FROM    sys.database_principals dp
WHERE   dp.type IN ( 'U', 'G', 'S' ) AND
        dp.name NOT IN ( 'dbo', 'guest', 'INFORMATION_SCHEMA', 'sys' ) AND
        NOT EXISTS ( SELECT 1
                     FROM   sys.server_principals sp
                     WHERE  sp.name = dp.name );

-- SQL_Login_script_2015013

/*
    Based on Robert Davis' "Transferring Logins to a Database Mirror"
 
    Execute the script to generate scripts to transfer logins
*/

SET NOCOUNT ON;
USE master;
GO

-- Working variables for the iteration loops below; @SQLCmd accumulates each
-- generated statement before it is PRINTed.
DECLARE @LastId INT,
        @CurrentId INT,
        @SQLCmd NVARCHAR(MAX),
        @ServerPrincipal sysname,
        @IsDisabled INT,
        @Type CHAR(1),                      -- 'U'/'G' = Windows, 'S' = SQL login (see filter below)
        @SID VARBINARY(85),
        @SIDString NVARCHAR(100),           -- hex-string rendering of @SID for scripting
        @PasswordHash VARBINARY(256),
        @PasswordHashString NVARCHAR(300),  -- hex-string rendering of @PasswordHash
        @RoleName sysname,
        @Permissionstate NVARCHAR(60),
        @PermName sysname,
        @Class TINYINT,
        @MajorID INT;

-- Snapshot of the server logins to be scripted out
DECLARE @Logins TABLE (
        LoginID INT IDENTITY(1, 1) NOT NULL PRIMARY KEY,
        PrincipalId INT NOT NULL,
        [Name] sysname NOT NULL,
        [SID] VARBINARY(85) NOT NULL,
        IsDisabled INT NOT NULL,
        [Type] CHAR(1) NOT NULL,
        PasswordHash VARBINARY(256) NULL    -- NULL for Windows principals (no row in sys.sql_logins)
    );

-- Server-role memberships of those logins
DECLARE @Roles TABLE (
        RoleID INT IDENTITY(1, 1) NOT NULL PRIMARY KEY,
        RoleName sysname NOT NULL,
        LoginName sysname NOT NULL
    );

-- Explicit server-level permissions granted/denied to those logins
DECLARE @Permissions TABLE (
        PermID INT IDENTITY(1, 1) NOT NULL PRIMARY KEY,
        LoginName sysname NOT NULL,
        PermState NVARCHAR(60) NOT NULL,
        PermName sysname NOT NULL,
        Class TINYINT NOT NULL,             -- per the joins below: 101 = login-scoped, 105 = endpoint-scoped
        ClassDesc NVARCHAR(60) NOT NULL,
        MajorID INT NOT NULL,
        SubLoginName sysname NULL,          -- target login when Class = 101
        SubEndPointName sysname NULL        -- target endpoint when Class = 105
    );

/*
    Gather information on the logins, the roles they are members of and any 
    permissions they've been explicitly granted or denied...
*/

-- Grab logins 
/*
    As written the query below excludes sa, logins starting with ##, NT Authority 
    and NT Service accounts and distributor_admin.

    You can adjust the WHERE clause as desired to script out specific logins:

        WHERE p.name IN ( N'B', N'Tom', N'Judy' );

    or perhaps just SQL Authenticated logins (U and G are Windows Authenticated
    accounts):

        WHERE p.type = 'S';    
*/
INSERT  INTO @Logins ( PrincipalId, Name, SID, IsDisabled, Type, PasswordHash )
SELECT  sp.principal_id,
        sp.name,
        sp.sid,
        sp.is_disabled,
        sp.type,
        sl.password_hash                        -- NULL for Windows principals
FROM    sys.server_principals sp
        LEFT JOIN sys.sql_logins sl ON sl.principal_id = sp.principal_id
WHERE   sp.type IN ( 'U', 'G', 'S' ) AND
        sp.name <> N'sa' AND
        sp.name NOT LIKE N'##%' AND             -- internal ## logins
        sp.name NOT LIKE N'distributor_admin' AND
        sp.name NOT LIKE N'NT [SA]%';           -- [SA] is a LIKE char class: matches 'NT SERVICE\...' and 'NT AUTHORITY\...'

-- Capture the server-role memberships of the logins gathered above
INSERT  INTO @Roles ( RoleName, LoginName )
SELECT  role_p.name,
        lgn.[Name]
FROM    sys.server_role_members srm
        INNER JOIN sys.server_principals role_p
            ON role_p.principal_id = srm.role_principal_id
        INNER JOIN @Logins lgn
            ON lgn.PrincipalId = srm.member_principal_id
WHERE   role_p.type = 'R';

-- Capture explicit server-level permission grants/denies for those logins
INSERT  INTO @Permissions ( LoginName, PermState, PermName, Class, ClassDesc, MajorID, SubLoginName, SubEndPointName )
SELECT  lgn.[Name],
        perm.state_desc,
        perm.permission_name,
        perm.class,
        perm.class_desc,
        perm.major_id,
        tgt_login.name,                 -- populated only for class 101 (login-scoped)
        tgt_ep.name                     -- populated only for class 105 (endpoint-scoped)
FROM    @Logins lgn
        INNER JOIN sys.server_permissions perm
            ON perm.grantee_principal_id = lgn.PrincipalId
        LEFT JOIN sys.server_principals tgt_login
            ON tgt_login.principal_id = perm.major_id AND perm.class = 101
        LEFT JOIN sys.endpoints tgt_ep
            ON tgt_ep.endpoint_id = perm.major_id AND perm.class = 105;


/*
    Now generate statements to create each login, role membership and permission 
    grant/deny...
*/

-- Script a Create Login statement for each entry in @Logins
SELECT  @CurrentId = 1, @LastId = MAX(LoginID)
FROM    @Logins;

WHILE (@CurrentId <= @LastId)
BEGIN
    SELECT  @ServerPrincipal = Name, @IsDisabled = IsDisabled,
            @Type = [Type], @SID = [SID], @PasswordHash = PasswordHash
    FROM    @Logins
    WHERE   LoginID = @CurrentId;

    SET @SQLCmd = 'Create Login ' + QUOTENAME(@ServerPrincipal);

    IF @Type IN ( 'U', 'G' )
    BEGIN
        -- Windows individual/group accounts: SID and credentials come from the domain
        SET @SQLCmd = @SQLCmd + ' From Windows;';
    END
    ELSE IF @PasswordHash IS NULL
    BEGIN
        -- Guard: a SQL login whose password_hash is NULL would otherwise
        -- concatenate @SQLCmd to NULL and PRINT a blank line, silently
        -- dropping the login from the generated script.
        SET @SQLCmd = '-- WARNING: no password hash available for login ' +
                      QUOTENAME(@ServerPrincipal) + '; create it manually.';
    END
    ELSE
    BEGIN
        -- SQL authenticated logins: carry the password hash and SID across so
        -- the login on the target server matches the source byte-for-byte.
        SET @PasswordHashString = '0x' + CAST('' AS XML).value('xs:hexBinary(sql:variable("@PasswordHash"))', 'nvarchar(300)');
        SET @SIDString = '0x' + CAST('' AS XML).value('xs:hexBinary(sql:variable("@SID"))', 'nvarchar(100)');
        SET @SQLCmd = @SQLCmd + ' With Password = ' + @PasswordHashString +
                      ' HASHED, SID = ' + @SIDString + ';';
    END

    PRINT @SQLCmd;

    -- Preserve the disabled state of the source login
    IF @IsDisabled = 1
    BEGIN
        SET @SQLCmd = 'Alter Login ' + QUOTENAME(@ServerPrincipal) + ' Disable;';
        PRINT @SQLCmd;
    END

    SET @CurrentId += 1;
END


-- Script an sp_addsrvrolemember statement for each entry in @Roles.
-- QUOTENAME(..., '''') wraps each name in single quotes AND doubles any
-- embedded quotes; the previous naive concatenation produced a broken
-- statement for names containing an apostrophe.
-- NOTE(review): sp_addsrvrolemember is deprecated in favor of
-- ALTER SERVER ROLE ... ADD MEMBER (SQL Server 2012+); kept here for
-- compatibility with older targets.
SELECT  @CurrentId = 1, @LastId = MAX(RoleID)
FROM    @Roles;

WHILE (@CurrentId <= @LastId)
BEGIN
    SELECT  @ServerPrincipal = LoginName, @RoleName = RoleName
    FROM    @Roles
    WHERE   RoleID = @CurrentId;

    PRINT 'Exec sp_addsrvrolemember @rolename = ' + QUOTENAME(@RoleName, '''') +
          ', @loginame = ' + QUOTENAME(@ServerPrincipal, '''') + ';';

    SET @CurrentId += 1;
END


-- Script out any specific Grant or Deny statements
SELECT  @CurrentId = 1, @LastId = MAX(PermID)
FROM    @Permissions;

WHILE (@CurrentId <= @LastId)
BEGIN
    -- Build the full GRANT/DENY statement in a single assignment. The
    -- previous version also copied PermState, PermName, Class, MajorID and
    -- LoginName into scalar variables that were never read; those dead
    -- assignments have been removed.
    SELECT  @SQLCmd = PermState + SPACE(1) + PermName + SPACE(1) +
            CASE Class
                WHEN 101 THEN 'On Login::' + QUOTENAME(SubLoginName)
                WHEN 105 THEN 'On ' + ClassDesc + '::' + QUOTENAME(SubEndPointName)
                ELSE ''
            END + ' To ' + QUOTENAME(LoginName) + ';'
    FROM    @Permissions
    WHERE   PermID = @CurrentId;

    PRINT @SQLCmd;

    SET @CurrentId += 1;
END
 

Different States of a Database in SQL Server

A SQL Server database can only be in one specific state at a given time. The seven different states of a SQL Server database are:

ONLINE : The database is online and available. This will show up as the  NORMAL state.

OFFLINE: The database is unavailable. Databases are set offline by executing the command ALTER DATABASE <DBName> SET OFFLINE. This can be done if the database administrator wants to move a database file from one location to another. In this case, the database would be set OFFLINE, then the ALTER DATABASE <DBName> MODIFY FILE command would be executed, followed by changing the database back to ONLINE.

RESTORING
: One or more files are being restored. The database is unavailable.

RECOVERING:
The database is being recovered. Except in the case of database mirroring, this is a transient state that occurs during the automatic or manual recovery process. The database is unavailable.

RECOVERY PENDING:
A database will be in this state if SQL Server encounters a resource-related error during recovery. The database will be unavailable until the database administrator resolves the resource error and allows the recovery process to be completed.

SUSPECT :One or more database files have been marked as suspect because of a data access or Read error. This may occur if a TORN PAGE has been detected during database Read operations. If a database has been marked as SUSPECT, the database is unavailable until the error has been resolved.

EMERGENCY: The database will be in this state when the database administrator has set the status to EMERGENCY. In this state, the database is in single-user mode and may be repaired or restored. If the database has been marked as SUSPECT, this is the first step in correcting the problem, short of a database restore. Only members of the sysadmin fixed server role can set a database to the EMERGENCY state.

How to Identify Current State of SQL Server Database?

-- Report the recovery model and current state of every database.
-- sys.databases supersedes the deprecated SQL Server 2000 compatibility view
-- dbo.sysdatabases and exposes recovery_model_desc / state_desc directly,
-- so no DATABASEPROPERTYEX calls are needed.
USE master;
GO
SELECT
    @@SERVERNAME AS [Server Name]
    ,name AS [Database Name]
    ,recovery_model_desc AS [Recovery Model]
    ,state_desc AS [Database Status]
FROM sys.databases
ORDER BY name ASC;
GO

Capacity Planning in SQL


One of the first things that must be determined when planning new database is how much disk space will be required to support the database. The idea is to both ensure that there is sufficient disk space available for data expansion and to reduce the amount of data and log file growths that are performed to accommodate the data expansion to improve database efficiency.

If the database is being built to support an application purchased from a vendor, the capacity planning for the database should be very easy. However, the simplicity depends on the software vendor providing detailed documentation. The documentation must describe the average size of the database after periodic intervals where a defined number of users and transactions were supported. If the documentation is provided, you will have a good idea of what to expect from the database and can configure it accordingly. If the vendor did not provide the information, your job as a database administrator becomes a bit more complicated, and you may just have to guess. However, it must be an educated guess using as much
information as you are able to collect. The difficulty is often in the fact that you may not know how the vendor is storing and retrieving data, so the database must be monitored for growth trends to adequately predict the amount of storage space.If the database is being designed and built internally, there are established techniques for determining how big the data files will need to be. These techniques work because you know how much data is added for every transaction, whereas in a vendor-provided database, that information may not be available.One such technique that I am sure you will encounter is calculating a database size requirement by calculating table sizes.
It looks like this:



1)Add up the total number of bytes used by the fixed-length columns in the table.
2)Average the total number of bytes used by the variable-length columns in the table.
3)Add the number from Step 1 to the number calculated in Step 2.
4) Divide 8,060 (the maximum amount of data bytes in a page) by the number calculated in   Step 3, and round down to the nearest whole number. This is the number of rows that will fit on a single page. Remember that rows cannot span pages, which is why you round down.
5) Divide the total number of expected rows by the number of rows per page calculated in Step 4. This is the total number of data pages expected to support the table.
6) Multiply the number calculated in Step 5 by 8,192 (the size of the data page). This is the total number of bytes required for the table.
7) Repeat the process for every table in the database.

SQL Server Profiler

The SQL Server Profiler is an absolutely essential tool for both DBAs and developers alike. Profiler provides the ability to monitor and record virtually every facet of SQL Server activity. It is actually a graphical interface for SQL Trace, which is a collection of stored procedures and functions that are used to monitor and record server activity.
SQL Server Profiler can be launched from the Tools menu of SQL Server Management Studio, or from the All Programs > Microsoft SQL Server 2008 > Performance Tools menu.
 

SQL Server Trace
The Profiler can be used to create and view SQL Server Traces. When creating a new trace, the Profiler will prompt you for the server on which you will be running the trace. Remember that the Profiler is just a graphical interface for SQL Trace, and what is occurring in the background is the execution of stored
procedures and functions on the server you connect to. If the server is very busy and is operating at the edge of its capabilities, the additional load of running SQL Trace on it may well put it over the edge.




Trace Properties
When creating a new trace, the Trace Properties dialog is shown below. The Trace Properties dialog includes two tabs: the General tab and the Events Selection tab. A third tab, Events Extraction Settings, will be enabled if any XML SHOWPLAN event is selected in the Events Selection tab. Trace Properties dialog.


General Tab
The General tab provides the ability to set the basic structure of the trace (such as the trace name, trace template, saving options, and trace stop time). It also displays the provider name and type, because SQL Server Profiler is not limited to the Data Engine. It can also be used to trace SQL Server Analysis Services.

Use the Template — This dropdown list contains several pre-built trace templates. Each template is a pre-defined set of events and filters that provide for the monitoring of SQL Server for particular purposes. These templates can be a good place to start when creating traces to monitor SQL Server. It is also possible to create your own templates, and it is strongly recommended that you do so. The provided templates are fine, but you will undoubtedly want to collect different information from that which the templates provide. To avoid having to create the same

custom trace over and over again, create and save a template to capture the information you are interested in.

Save to File — Selecting this checkbox will display a dialog prompting for a file location to save the trace data to. The filename defaults to the name assigned to the trace with the .trc extension. However, the name can be changed if desired. The default maximum file size for a trace file is 5 MB, but it can be set to virtually any size. When the ‘‘Save to file’’ option is selected, two additional options are enabled: the ‘‘Enable file rollover’’ option and the ‘‘Server processes trace data’’ option.

 Enable File Rollover — This option causes a new file to be created every time the maximum file size is reached. Each file created is named the same as the original file with a sequential number added to the end of the name. Each sequential file is linked to the preceding file, so that each file can be opened in sequence, or they can all be opened in a single trace window.

 Server Processes Trace Data — This option causes the server that the traces are running on to also process the trace information. By default, the Profiler application processes the  trace information. During high-stress operations, if the Profiler processes the data, it may  drop some events and even become unresponsive. If the server processes the trace data,  no events will be dropped. However, having the server process the trace data and run the     trace puts an additional load on the server, which can have a negative impact on server performance.
❑ Save to Table — Trace data can also be saved to a table instead of a file by selecting the ‘‘Save  to table’’ option. This is very useful if the trace data is going to be analyzed by an external application that requires access to the data stored in a relational format. The down side is that large traces will generate huge amounts of data that will be inserted into the storage table. This can also cause server performance issues, but you can mitigate this by saving trace information to a different server from your production system. If saving trace data to a table, the maximum amount of rows to be stored can also be assigned.
❑ Enable Trace Stop Time — Traces can be started and configured to automatically stop at a pre-defined time by enabling the ‘‘Enable trace stop time’’ option and assigning a stop time.
Events Selection Tab
The Events Selection tab provides the ability to choose which SQL Server events are to be traced. Events are grouped in 21 SQL Server event groups with a total of 170 distinct SQL Server events, plus 10 user-definable events. There are also 11 Analysis Services groups with 38 distinct events. SQL Server Books Online has an excellent reference that describes each group and event. Search for the titles ‘‘SQL Server Event Class Reference’’ for SQL Server events and ‘‘Analysis Services Event Classes’’ for Analysis Services events.
❑ Column Filters — Also in the Events Selection tab is the option to filter the events that are traced. The ability to filter the data is incredibly useful. For example, if you are troubleshooting a particular application, you can filter on just the events generated by the application of interest and avoid having to sift through all the events generated by SQL Server and other applications.
❑ Organize Columns — The Organize Columns button enables you to place the trace columns you are most interested in so that they are easily seen when viewing the trace. Because a great deal of
data can be returned, it may very well be that the column you are most interested in is off the screen to the left. The Organize Columns button helps prevent this.