Create a point-in-time restore automatically – SQL 2005 version


A new version has been created! Check it out here: Updated: create point-in-time restore script automatically

This is a 2005-version of the script originally posted here: Create a full Point-In-Time restore script automatically! – 2008 version

/*
Generate Point-in-time restore script, given only DBName and Point-in-time timestamp
Works only on SQL2005 databases!!

Script for generating restore script for full backup originally created by Michael 
Valentine Jones, SQLTeam.com

The rest is created by Henning Frettem, www.thefirstsql.com
*/
-- Suppress "rows affected" chatter so the generated restore script prints cleanly
SET NOCOUNT ON
GO

DECLARE 
  @DBName varchar(200),            -- database to build the restore script for
  @PointInTime datetime,           -- target point in time to restore to
  @Filename varchar(200),          -- physical file of the chosen full backup
  @tab varchar(1),                 -- formatting helper: tab
  @cr varchar(2),                  -- formatting helper: CR+LF
  @Full_BackupStartDate datetime,  -- start time of the chosen full backup
  @Diff_BackupStartDate datetime,  -- start time of the chosen differential (if any)
  @Log_BackupStartDate datetime,   -- start time of the last processed log backup
  @SQL nvarchar(max)               -- accumulates the generated script

-- User input: change these two values before running
SELECT
  @DBName      = 'MyDatabaseName',
  @PointInTime = '2011-11-22 09:45:00'

-- Formatting helpers and an empty output buffer
SELECT
  @tab = char(9),
  @cr  = char(13) + char(10),
  @SQL = ''

BEGIN TRY
  --> Performing some checks.
  --  A severity-16 RAISERROR inside TRY transfers control straight to CATCH,
  --  so a failed check aborts the whole script generation.
  --  Check 1: a full backup taken at or before the PointInTime is mandatory.
  IF NOT EXISTS (SELECT 1 FROM msdb.dbo.backupset WHERE database_name = @DBName AND type = 'D' AND backup_start_date <= @PointInTime)
    RAISERROR(N'No full backup exists prior to the specified PointInTime, restore is not possible at the given PointInTime.', 16, 1)
  --  Check 2: a log backup started AFTER the PointInTime is required, because
  --  that is the backup that contains the target time (take a tail-log backup otherwise).
  ELSE IF NOT EXISTS (SELECT 1 FROM msdb.dbo.backupset WHERE database_name = @DBName AND type = 'L' AND backup_start_date > @PointInTime)
    RAISERROR(N'No backup of the log exist after the specified PointInTime, do a tail-log backup before running this script again', 16, 1)

  --> Getting the filename and the date of the last full backup prior to the PointInTime
  --  (type 'D' = full database backup; the most recent one at or before the target time)
  SELECT TOP 1 
    @Filename = b.physical_device_name,
    @Full_BackupStartDate = backup_start_date
  FROM msdb.dbo.backupset a
    INNER JOIN msdb.dbo.backupmediafamily b
      ON a.media_set_id = b.media_set_id
  WHERE a.database_name = @DBName
    AND a.type = 'D'
    AND a.backup_start_date <= @PointInTime
  ORDER BY a.backup_start_date DESC

  --> Create temp-table for file header information.
  --  The column list must match the RESTORE HEADERONLY result set of SQL Server
  --  2005 exactly (other versions return a different column count/order, which
  --  is why this script "Works only on SQL2005 databases").
  DECLARE @header table (
	  BackupName varchar(128) NULL,
	  BackupDescription varchar(255) NULL,
	  BackupType smallint NULL,
	  ExpirationDate datetime NULL,
	  Compressed tinyint NULL,
	  Position smallint NULL,
	  DeviceType tinyint NULL,
	  UserName varchar(128) NULL,
	  ServerName varchar(128) NULL,
	  DatabaseName varchar(128) NULL,
	  DatabaseVersion int NULL,
	  DatabaseCreationDate datetime NULL,
	  BackupSize numeric(20,0) NULL,
	  FirstLSN numeric(25,0) NULL,
	  LastLSN numeric(25,0) NULL,
	  CheckpointLSN numeric(25,0) NULL,
	  DatabaseBackupLSN numeric(25,0) NULL,
	  BackupStartDate datetime NULL,
	  BackupFinishDate datetime NULL,
	  SortOrder smallint NULL,
	  CodePage smallint NULL,
	  UnicodeLocaleId int NULL,
	  UnicodeComparisonStyle int NULL,
	  CompatibilityLevel tinyint NULL,
	  SoftwareVendorId int NULL,
	  SoftwareVersionMajor int NULL,
	  SoftwareVersionMinor int NULL,
	  SoftwareVersionBuild int NULL,
	  MachineName varchar(128) NULL,
	  Flags int NULL,
	  BindingID uniqueidentifier NULL,
	  RecoveryForkID uniqueidentifier NULL,
	  Collation varchar(128) NULL,
	  FamilyGUID uniqueidentifier NULL,
	  HasBulkLoggedData bit NULL,
	  IsSnapshot bit NULL,
	  IsReadOnly bit NULL,
	  IsSingleUser bit NULL,
	  HasBackupChecksums bit NULL,
	  IsDamaged bit NULL,
	  BeginsLogChain bit NULL,
	  HasIncompleteMetaData bit NULL,
	  IsForceOffline bit NULL,
	  IsCopyOnly bit NULL,
	  FirstRecoveryForkID uniqueidentifier NULL,
	  ForkPointLSN numeric(25,0) NULL,
	  RecoveryModel varchar(60) NULL,
	  DifferentialBaseLSN numeric(25,0) NULL,
	  DifferentialBaseGUID uniqueidentifier NULL,
	  BackupTypeDescription varchar(60) NULL,
	  BackupSetGUID uniqueidentifier NULL,
	  Seq int NOT NULL identity(1,1)
    )

  --> Create temp-table for db file information.
  --  Must match the SQL 2005 RESTORE FILELISTONLY result set; Seq preserves
  --  the order the files are returned in.
  DECLARE @filelist TABLE  (
	  LogicalName varchar(128) NULL,
	  PhysicalName varchar(260) NULL,
	  Type char(1) NULL,
	  FileGroupName varchar(128) NULL,
	  Size numeric(20,0) NULL,
	  MaxSize numeric(20,0) NULL,
	  FileID bigint NULL,
	  CreateLSN numeric(25,0) NULL,
	  DropLSN numeric(25,0) NULL,
	  UniqueID uniqueidentifier NULL,
	  ReadOnlyLSN numeric(25,0) NULL,
	  ReadWriteLSN numeric(25,0) NULL,
	  BackupSizeInBytes bigint NULL,
	  SourceBlockSize int NULL,
	  FileGroupID int NULL,
	  LogGroupGUID uniqueidentifier NULL,
	  DifferentialBaseLSN numeric(25,0) NULL,
	  DifferentialBaseGUID uniqueidentifier NULL,
	  IsReadOnly bit NULL,
	  IsPresent bit NULL,
	  Seq int NOT NULL identity(1,1)
    )

  --> Get header and filelist information from the backup file.
  --  EXEC with a concatenated string is used because RESTORE ... FROM DISK
  --  does not accept an expression, and INSERT ... EXEC captures the result set.
  INSERT INTO @header
  EXEC ('RESTORE HeaderOnly FROM DISK = ''' + @Filename + '''')

  INSERT INTO @filelist
  EXEC ('RESTORE FilelistOnly FROM DISK = ''' + @Filename + '''')

  --> Generate the full backup restore script: one MOVE clause per database file,
  --  with the RESTORE DATABASE header emitted before the first file and the
  --  restore options after the last.
  --  NOTE(review): string aggregation via SELECT @SQL = @SQL + ... with ORDER BY
  --  is not guaranteed by Microsoft, although it behaves reliably in this form.
  SELECT
    @SQL = @SQL + 
      CASE
        WHEN a.Seq = 1 THEN -- first file: emit the RESTORE DATABASE header
          @cr + 'RESTORE DATABASE [' + c.DatabaseName + ']' +
          @cr + 'FROM DISK =' + @cr + @tab + '''' +
          @Filename + '''' + @cr + 'WITH' 
        ELSE ''
      END
      + @cr + @tab + 'MOVE ''' + a.LogicalName + ''' TO ''' + a.PhysicalName + ''',' 
      +
      CASE 
        WHEN a.Seq = b.Seq THEN -- last file: close with the restore options
          @cr + @tab + 'REPLACE, STATS = 5, NORECOVERY'
        ELSE ''
      END
  FROM
    @filelist a
    CROSS JOIN
      (SELECT Seq = MAX(b1.Seq) FROM @filelist b1 ) b -- highest Seq = last file
    CROSS JOIN
      (SELECT DatabaseName = MAX(c1.DatabaseName) FROM @header c1) c -- db name from backup header
  ORDER BY
    a.Seq
    
  SELECT @SQL = @SQL + @cr + 'GO' + @cr + @cr 
    
  --> Restore the most recent differential backup (type 'I') taken between the
  --  full backup and the PointInTime, if one exists.
  --  BUGFIX: a trailing @cr is appended after 'GO' -- without it, the first
  --  RESTORE LOG statement was concatenated onto the same line as 'GO'
  --  ("GORESTORE LOG ..."), producing an invalid script whenever a
  --  differential backup existed.
  SELECT TOP 1
    @SQL = @SQL + 'RESTORE DATABASE [' + @DBName + '] FROM DISK = ''' + b.physical_device_name + ''' WITH NORECOVERY' + @cr + 'GO' + @cr,
    @Diff_BackupStartDate = a.backup_start_date
  FROM msdb.dbo.backupset a
    INNER JOIN msdb.dbo.backupmediafamily b
      ON a.media_set_id = b.media_set_id
  WHERE a.database_name = @DBName
    AND a.type = 'I'
    AND a.backup_start_date > @Full_BackupStartDate
    AND a.backup_start_date < @PointInTime
  ORDER BY a.backup_start_date DESC
  
  --> No differential found: the log restore chain must start from the full backup
  IF @Diff_BackupStartDate IS NULL
    SET @Diff_BackupStartDate = @Full_BackupStartDate
    
  --> Generate all log restores except the last one.
  --  Multi-row variable concatenation: each qualifying log backup (in start-date
  --  order) appends one RESTORE LOG statement; @Log_BackupStartDate ends up
  --  holding the start date of the last log backup processed.
  SELECT 
    @SQL = @SQL + 'RESTORE LOG [' + @DBName + '] FROM DISK = ''' + b.physical_device_name + ''' WITH NORECOVERY' + @cr + 'GO' + @cr,
    @Log_BackupStartDate = a.backup_start_date   
  FROM msdb.dbo.backupset a
    INNER JOIN msdb.dbo.backupmediafamily b
      ON a.media_set_id = b.media_set_id
  WHERE a.database_name = @DBName
    AND a.type = 'L'
    AND a.backup_start_date > @Diff_BackupStartDate
    AND a.backup_start_date < @PointInTime
  ORDER BY a.backup_start_date
  
  --> Generate last log restore script with the stopat command and recovery.
  --  The first log backup that STARTED after the PointInTime is the one that
  --  contains it; STOPAT rolls the database forward only to the requested time
  --  and RECOVERY brings it online.
  SELECT TOP 1
    @SQL = @SQL + 'RESTORE LOG [' + @DBName + '] FROM DISK = ''' + b.physical_device_name + ''' 
      WITH RECOVERY, STOPAT = ''' + CONVERT(varchar(20), @PointInTime, 120) + '''' + @cr + 'GO' + @cr
  FROM msdb.dbo.backupset a
    INNER JOIN msdb.dbo.backupmediafamily b
      ON a.media_set_id = b.media_set_id
  WHERE a.database_name = @DBName
    AND a.type = 'L'
    AND a.backup_start_date > @PointInTime
  ORDER BY a.backup_start_date ASC

  -- NOTE(review): PRINT truncates nvarchar output at 4000 characters, so a very
  -- long restore script (many files / many log backups) may print incomplete --
  -- verify the end of the output before executing it.
  PRINT @SQL

END TRY
BEGIN CATCH
  -- Any failed check or query above lands here; surface the message only
  PRINT ERROR_MESSAGE()
END CATCH

How to find gaps in identity columns at the speed of light


I was just recently working with a client that was moving a few 100k rows of data on a daily basis from one system to another and all this data was incremental in the sense that the source data was a ticket system and there was an incrementing ID on all the sales records. Now the problem was that for various reasons not all data had been transferred in the past, a few sales records had been deleted because of void transactions (these were not so many), others were simply missing (this was a substantial bunch). All these missing data  were amounting to quite a bit of money in the sales reports so I had to find a way to identify and transfer them on a regular basis so that this problem was kept at a bare minimum.

The tables that needed to be synced had in excess of 6 million rows and even though it’s not huge it’s more than enough to cause problems if you write some bad code. SO! With some excellent help from all my geek friends over at http://www.sqlteam.com/forums I was able to come up with a script that did the trick…and mind you, it took less than 30 seconds to execute on the production box, identifying over 500 gaps! Now since we are only talking 100k rows here the query will probably have sub-second execution time, but it scales very well (the concept behind the query was originally created by MVP Steve Kass, http://www.stevekass.com/)

-- Make sure insert runs a little faster
SET NOCOUNT ON
GO

-- Prepare a sample table with an identity PK we can punch holes in
CREATE TABLE GapsTest (
	ID int NOT NULL IDENTITY(1, 1) PRIMARY KEY CLUSTERED,
	Name varchar(50) NULL
	)
GO

-- Insert 100k rows of dummy data
-- (GO <n> repeats the preceding batch n times; SSMS/sqlcmd feature, not T-SQL)
INSERT INTO GapsTest (Name) VALUES ('TheFirstSQL')
GO 100000

-- Delete some rows to create gaps in the identity column:
-- both scattered single IDs and one large contiguous range
DELETE GapsTest WHERE ID IN (263, 264, 37392, 44, 98712, 66219)
DELETE GapsTest WHERE ID BETWEEN 35000 AND 36000
GO

-- Run the "magic" query
;WITH 
cte AS (
	SELECT 
		ID, 
		RowNum = ROW_NUMBER() OVER (ORDER BY ID) 
	FROM dbo.GapsTest),
cte2 AS (
	SELECT *, DENSE_RANK() OVER (ORDER BY ID - RowNum) As Series 
	FROM cte),
cte3 AS (
	SELECT *, COUNT(*) OVER (PARTITION BY Series) AS SCount 
	FROM cte2),
cte4 AS (
	SELECT 
		MinID = MIN(ID), 
		MaxID = MAX(ID), 
		Series
	FROM cte3
	GROUP BY Series)
SELECT GapStart = a.MaxID, GapEnd = b.MinID
FROM cte4 a
	INNER JOIN cte4 b
		ON a.Series+1 = b.Series
ORDER BY GapStart

How to enable Master Data Services (MDS) in SQL Server 2008 R2


Enabling Master Data Services (MDS) in SQL Server 2008 R2 is easy, but it isn’t a part of the regular installation process so I thought I’d show you how. It is in fact veeery simple! There are however some prerequisites:

  • Master Data Services is only available in 64-bit environments
  • You need to have the .NET Framework 3.5 SP1 installed (this is installed with SQL 2008 R2)
  • You need to enable the IIS-role on the server in order to run the Master Data Manager

If you open up the SQL 2008 R2  installation media and browse the files on the DVD/ISO you will find a  folder there called Master Data Services. You will have to traverse through a few subfolders but the only content once you get to the bottom is an .msi-file called “MasterDataServices.msi”. There is not a whole lot of configuration to be done in the installer, so just run it like any other .msi and wait until it finishes.

Once the installer is finished the Master Data Services Configuration Manager should open itself up automatically. If it doesn’t you’ll find it on the Start menu under SQL Server 2008 R2 -> Master Data Services -> Configuration Manager. It should look like this:

Create an MDS database

The next thing you need to do is to create the Master Data Services database. Click “Databases” and then “Create database” and it will open up a wizard for you that will take you through the create process. After a few clicks it will ask you to authenticate to the server where the MDS database will be created, and after that you are asked to specify a database name. I usually like to call it “MDS” for simplicity:

Then you are asked to specify a service account. This is an account that the Master Data Services website (Master Data Manager) will use to authenticate with the MDS database. It will have to be a Windows account, either a local one or preferably a domain account.

The next page in the wizard tells you to specify the administrator account. And this is very important: The administrator account CAN NOT be changed at a later time!

And that’s it for the database! When the create database wizard is finished you can browse the different settings

Configuring the Master Data Manager website

Once the database has been created it’s time to set up the Master Data Manager Website. Go to Web Configuration in the Configuration Manager and click the “Create Site“-button…it will start a wizard that will create all the stuff you need:

The create site pop-up should look something like this:

You might get an error if you try to set the new site up to run on port 80 because the Default Web Site already runs on this port. Specify a host header, change the port number or remove the default web site to resolve this issue.

Connect the Master Data Manager website with the MDS database

When the new MDS website has been created you have to connect it to the MDS database in order for it to work properly. Click the Select… button in the database section of the window to open up the database connection. Specify the connection properties and choose the newly created MDS database, and then click OK. It should look something like this:

Once you have done this you are done! The only thing left is to decide whether you would like to expose Master Data Services as a web service and if you are in a master data management project you’ll most likely want to do that. So check the check box:

Click Apply and all the settings you have created are saved and if all goes well (crossing fingers!) you’ll get this pop-up in 20-30 seconds after clicking the apply button:

Click OK again and your new Master Data Manager website opens up beautifully in the browser on your server.

Happy Master Data Management!!

 

Regular Expressions, advanced string matching and new Split function SQL Server 2008 R2


NOTE: in order to use the functions discussed below you will need to enable Master Data Services on your server and install a version of the MDS database. You *do not* need to configure the Master Data Manager website or anything else, and since the MDS is not a Windows Service it will not create any additional overhead on your server. Just run the .msi and create the database and how you do this is described here: https://thefirstsql.com/2011/02/08/how-to-enable-master-data-services-mds-in-sql-server-2008-r2/

I’ve only recently begun investigating the new Master Data Services in SQL Server 2008 R2 and I have to say that I was completely baffled at some of the new functionality!! MDS is regarded as a Data Management tool and is usually only used in Business Intelligence projects. And it was for this purpose I started investigating, but I soon realised that some of the functions that come with MDS has a far greater potential than just in BI projects. The following functions comes out-of-the-box when you install an instance of Master Data Services:

  • mdq.NGrams
  • mdq.RegexReplace
  • mdq.RegexExtract
  • mdq.RegexSplit
  • mdq.RegexIsMatch
  • mdq.Similarity
  • mdq.RegexIsValid
  • mdq.SimilarityDate
  • mdq.RegexMask
  • mdq.Split

Now, I have to admit that I can’t see the real usage value for all of these but I find mdq.RegexIsMatch, mdq.Similarity and mdq.Split to be reeeeeaally interesting :) They can solve a whole bunch of problems that previously had no solution at all or that required some cumbersome workarounds.

mdq.RegexIsMatch

There is no doubt about it; I suck at RegEx! Always when I want to do some complex matching I have to search the web for hours to find an example that I can copy and that’s kind of embarrassing. Nevertheless, this new function makes it possible to use RegEx in sql server without having to create your own clr assembly and that really helps! Here is how you use the new function (be aware that you’ll have to install the MDS database first):

--> Usage: mdq.RegexIsMatch (input,pattern,mask)
--  Demo: validate e-mail addresses with the MDS CLR function mdq.RegexIsMatch
--  (requires the Master Data Services database to be installed).
--  NOTE(review): the pattern contains a space inside "[0-9]{1,3} \." which looks
--  like a line-wrap/copy-paste artifact -- verify the regex before reusing it.
declare @table table (
	email varchar(200))
insert into @table values ('my.sample@mail.com'),('15.sample@mail.com'), ('my.sample@mail.c')

select email, isvalid = mdq.RegexIsMatch(email, N'^([a-zA-Z0-9_\-\.]+)@((\[[0-9]{1,3} \.[0-9]{1,3}\.[0-9]{1,3}\.)|(([a-zA-Z0-9\-]+\.)+))([a-zA-Z]{2,4}|[0-9]{1,3})(\]?)+$', 0)
from @table
--> Result
email	isvalid
my.sample@mail.com	1
15.sample@mail.com	1
my.sample@mail.c	0

A ridiculously long regular expression, but it works!

mdq.Similarity

The coolest of the new functions BY FAR is the mdq.Similarity function. It’s not something you’ll use every day, but when you need it you *really* need it! What it basically does is to compare two strings and score their similarity from 0.0 to 1.0 and the higher the number is the more alike the two strings are. This is extremely useful when you for example need to merge two tables together or you need to find closely matching records and you don’t have a full-text index available. The function supports 4 different matching algorithms:

  • Levenshtein edit distance algorithm (0)
  • Jaccard similarity coefficient algorithm (1)
  • A form of the Jaro-Winkler distance algorithm (2)
  • Longest common subsequence algorithm (3)

I will not go in to detail about each algorithm, but to my (limited) experience it seems that the Jaro-Winkler distance algorithm (3) will give the best result when comparing name records which is what I have done so far. Here’s how you would use it:

--> Usage: mdq.Similarity (input1,input2,method,containmentBias , [ minScoreHint ] )
SELECT mdq.Similarity(N'The First SQL', N'TheFirstSQL', 2, 0.0, 0.0);
--> Result:
(No column name)
0,964102564102564

mdq.Split

The Split function is widely used in programming and in SQL Server there are numerous implementations that span from custom clr-functions to all kinds of user defined functions. And the mdq.Split works more or less exactly as you expect it to, except it has some additional functionality. You can for example use regular expressions inside the split function:

--> Usage: mdq.Split (input,separators,removeEmpty,tokenPattern,[mask])
select * from mdq.split('the, first, sql, rules', ', ', 1, '', 0)
--> Result:
Sequence	Token	IsValid
1	the	1
2	first	1
3	sql	1
4	rules	1

Pretty cool, eh?? :)

Error accessing SSRS Reports


I reinstalled my computer recently and as a result I had to reconfigure my Reporting Services installation as well. After configuring everything I was sort of surprised when I got this message when I tried accessing the http://localhost/Reports url: “User ‘NO-DOM\hef’ does not have required permissions. Verify that sufficient permissions have been granted and Windows User Account Control (UAC) restrictions have been addressed.”

And it actually took me quite a few minutes (in the 30’s! yes, it’s late in the day…) to figure out the solution even though it was quite simple. By Starting the browser as an Administrator everything worked perfectly… :)

 

SQL Server Profiler – a tutorial


During all kinds of database interaction there are times when you want to see what is actually being sent to the database, the actual queries that are executed by the database engine. Some times you have 3rd party apps that do all kinds of different stuff under the hood, or maybe you have inherited a website from some developer that made a serious spaghetti of everything. The only way of knowing what is really going on in the database is to use SQL Server Profiler.

Now, although profiler is a simple yet *very* powerful tool, it does take some tweaking to make it work the way you want it to. It comes with some default templates but in my experience they are not nearly good enough to be useful in a working environment. There are a whole bunch of objects/events that can be monitored but what I’m gonna do is show you how to make a trace template that will provide the information you need most of the time.

Here is the order of things you need to do:

  1. First of all you need to open up Profiler. You can either find it in the Tools menu in SQL Server Management Studio, or you can find it on the Windows Start Menu under Microsoft SQL Server -> Performance Tools.
  2. When Profiler opens it will ask you to connect to the server you want to profile. Click “Cancel” in this window, no connection is needed to create a Profiler Template like we are doing now.
  3. You should now have a blank Profiler window and it’s time to create the new template. Go to File -> Templates and then click “New Template”
  4. The General window of the Template generator pops up and you need to fill out the selected server type for the template and a new template name. I usually go for “MyTemplate” (quite original right :) as the name  as I usually only have one custom template in there. You should also check “Base the new template on an existing one” and choose “TSQL_Duration” as your existing template. Finally check the last checkbox to make this the default template for new traces. Do NOT click Save!
  5. Now you need to click “Event Selection” which is the second tab in the open window and for people not familiar with Profiler this is where the tricky part starts. After all, how do you know which events you need to choose if you’ve never used it before??
  6. Since we chose to base the new template on an existing one (TSQL_duration), the events selection window will be preconfigured with some events. Be aware that the two events that are chosen will only take in to account those queries/procedures that have actually finished. Check the checkbox in the lower right part of the window that says “Show all columns”. A bunch of checkboxes will appear next to the two preconfigured events.
  7. Now you need to do the following:
    • Remove the checkbox for the column BinaryData for the event RPC:Completed.

    Then make sure that the following checkboxes are checked for BOTH events:

    • Duration (the number of milliseconds it took to run the query)
    • TextData (the actual sql query text/stored procedure call that is sent to the database, including parameters)
    • SPID (the system process ID of the current connection)
    • ApplicationName (the name of the application the query/procedure originates from, could sometimes be empty)
    • DatabaseName (the database name the query is sent to)
    • HostName (the server name of the server sending the query)
    • LoginName (the username of the authenticated user that opened the connection)
    • StartTime (the timestamp when the query was started)
  8. Ok, we’ve come a long way now! What we need to do now is to set some Column Filters so our traces wont be cluttered with mumbo-jumbo that is not relevant to what we are doing:
    • Uncheck the “Show all columns”
    • Click “Column Filters”
  9. Setting the correct Column Filters is essential to getting good trace output and if you don’t do it right your trace might get flooded with a lot of stuff that will make it harder for you to find what you’re looking for. And the only column filter we will set for the *template* is a SPID filter of Greater than or equal to 50. All SPIDs that have a number less than 50 are reserved for internal processes and are usually not of any value to us. Also make sure that the “Exclude rows that do not contain values” is checked. Click OK to save your column filters. When at a later stage you are using this template, it will be useful to add additional filters. Remember that this is only a template, so when you’re running a trace using this template you can change all the events and columns and filters to your liking. What I usually do though is to create filters on the TextData (the sql query/procedure that is being executed) or the DatabaseName. Filters are usually specified as LIKE with a value of i.e. “%CREATE TABLE%” (without any quotation marks) or %MyDatabaseName% and always remember to check the “Exclude rows that do not contain values” when making filters.
  10. Now you need to click “Organize Columns”. And I have always found Profiler to be a little weird when it comes to this organizing, because Profiler actually sorts the output of the trace according to the order you specify here. This in itself might not be so weird but the fact that it is impossible to sort the output of a trace after it has started makes this step really important! And it took me quite a while to figure it out actually :) And to me at least it’s vital that the trace is always sorted by StartTime. If you sort by StartTime it has no real importance what the order of the other columns is, so these can be put in whichever order you prefer. After you’re finished organizing, your window should look more or less like this. The important thing is that the StartTime is on top!
  11. Once you have done all this you are set to go, and all you need to do now is to click “Save”. Your template is done!
  12. To test the newly created template you need to start a new trace by going to File -> New Trace. You need to log on to the server you want to trace, and once you have done that the new trace window will open. If you set the template you just created to be the default template it will automatically be selected for you, if not you will have to select it manually in the “Use the template” dropdown.
  13. Click Run and all new trace events that satisfy your filters will now appear in the order of when they were started :) By selecting one or more lines (use the Ctrl-key to select several)  the actual t-sql that was sent to the database will be displayed in the details window. If you select several lines, they will be separated by the GO statement automatically. 

Happy Tracing!

Mirroring between two stand-alone servers using certificates


Mirroring is usually done between two servers inside the same network where both servers are a part of a domain and have a trusted authentication mechanism provided by Active Directory. But what if this is not the case? What if you have your mirroring partners in separate networks or in separate data centers even, where there is no trusted authentication possible?

There is still hope, and the solution is to use SQL Server certificates. Here are the scripts you need to set up mirroring between a PRINCIPAL and a MIRROR server using certificates.

PLEASE NOTE: you can not simply copy and paste these scripts in to SQL Server Management Studio, press F5 and expect everything to work. You need to run each individual statement by itself, and some times you will need to run scripts on the mirror before progressing on the principal (i.e you need to create and do a backup of the certificate on the mirror before importing it to the principal, obviously)

--> This is the setup of the PRINCIPAL (master) in a mirroring environment.
--  Run statement by statement, interleaved with the MIRROR script below: the
--  other server's certificate must be backed up and copied over before it can
--  be imported here (see the note at the top of this post).

USE master
GO

-- Make sure a database master key exists (replace 'password' with a strong one)
CREATE MASTER KEY ENCRYPTION BY PASSWORD = 'password';
GO

--> Create a name not using "master" or "principal", remember: the roles can 
--  switch but the certificates remain the same. 
CREATE CERTIFICATE SQLSrv1_mirroring_cert 
   WITH SUBJECT = 'SQLSrv1 certificate',
      START_DATE = '2009-01-01 00:00:00',
      EXPIRY_DATE = '2030-01-01 00:00:00'
GO

-- Mirroring endpoint: authenticates inbound connections with this server's
-- certificate; ROLE = ALL lets the server act as principal, mirror or witness
CREATE ENDPOINT Mirroring_Endpoint
   STATE = STARTED
   AS TCP (
      LISTENER_PORT = 7024, 
      LISTENER_IP = ALL
   ) 
   FOR DATABASE_MIRRORING ( 
      AUTHENTICATION = CERTIFICATE SQLSrv1_mirroring_cert, 
      ENCRYPTION = REQUIRED ALGORITHM AES, 
      ROLE = ALL
   )
GO

--> Backup certificate and transfer to SQLSrv2
BACKUP CERTIFICATE SQLSrv1_mirroring_cert TO FILE = 'C:\SQLSrv1_mirroring_cert.cer';
GO

--> Create a login for the other server process
CREATE LOGIN SQLSrv2_mirroring_login WITH PASSWORD = 'password';
GO

--> Create a user for the new login 
CREATE USER SQLSrv2_mirroring_user FOR LOGIN SQLSrv2_mirroring_login;
GO

--> Associate the certificate with the user.
--  NOTE(review): the certificate object is named SQLSrv2_cert here while the
--  mirror script creates its own copy as SQLSrv1_mirroring_cert -- the naming
--  is inconsistent (harmless, since the name is local to each server) but
--  worth aligning.
CREATE CERTIFICATE SQLSrv2_cert
   AUTHORIZATION SQLSrv2_mirroring_user
   FROM FILE = 'C:\SQLSrv2_mirroring_cert.cer'
GO

--> Grant the user access to the endpoint 
GRANT CONNECT ON ENDPOINT::Mirroring_Endpoint TO [SQLSrv2_mirroring_login]
GO

--> Create a FULL BACKUP of each database to be mirrored
BACKUP DATABASE mojo TO DISK = 'D:\SQLbackup\mojo_20090211.bak'

--> Restore these databases to the MIRROR using NORECOVERY

--> Set up partnering between the mirror and the principal (use a fully qualified name or an IP address)

--> Set up partnering between the principal and the mirror (use a fully qualified name or an IP address).
--  IMPORTANT: run this only AFTER the mirror side has executed its
--  ALTER DATABASE ... SET PARTNER command.
ALTER DATABASE mojo SET PARTNER ='tcp://sqlsrv2:7024'
GO
--> This is the setup of the MIRROR (slave) in a mirroring environment.
--  Run statement by statement; back up this server's certificate and copy it
--  to the principal before the principal imports it.

USE master
GO

--> Make sure a master key is set (replace 'password' with a strong one)
CREATE MASTER KEY ENCRYPTION BY PASSWORD = 'password';
GO

--> Create this server's certificate, used to authenticate the mirroring endpoint
CREATE CERTIFICATE SQLSrv2_mirroring_cert 
   WITH SUBJECT = 'SQLSrv2 certificate',
      START_DATE = '2009-01-01 00:00:00',
      EXPIRY_DATE = '2030-01-01 00:00:00'
GO

-- Mirroring endpoint: same port and settings as on the principal
CREATE ENDPOINT Mirroring_Endpoint
   STATE = STARTED
   AS TCP (
      LISTENER_PORT = 7024, 
      LISTENER_IP = ALL
   ) 
   FOR DATABASE_MIRRORING ( 
      AUTHENTICATION = CERTIFICATE SQLSrv2_mirroring_cert, 
      ENCRYPTION = REQUIRED ALGORITHM AES, 
      ROLE = ALL
   )
GO

--> Backup certificate and transfer to SQLSrv1
BACKUP CERTIFICATE SQLSrv2_mirroring_cert TO FILE = 'C:\SQLSrv2_mirroring_cert.cer';
GO

--> Create a login for the other server process
CREATE LOGIN SQLSrv1_mirroring_login WITH PASSWORD = 'password'
GO

--> Create a user for the new login 
CREATE USER SQLSrv1_mirroring_user FOR LOGIN SQLSrv1_mirroring_login;
GO

--> Associate the certificate with the user.
CREATE CERTIFICATE SQLSrv1_mirroring_cert
   AUTHORIZATION SQLSrv1_mirroring_user
   FROM FILE = 'C:\SQLSrv1_mirroring_cert.cer'
GO

--> Grant the user access to the endpoint 
GRANT CONNECT ON ENDPOINT::Mirroring_Endpoint TO [SQLSrv1_mirroring_login]
GO

--> Copy a backup of the database(s) to be mirrored

--> Restore them using NORECOVERY.
--  NOTE(review): the path below is wherever the backup file copied from the
--  principal lives on this server -- adjust it to your own location.
RESTORE DATABASE mojo
   FROM DISK='d:\SQLBackups\mojo_20090211.bak'
   WITH NORECOVERY
GO

--> Set up partnering between the mirror and the principal (use a fully qualified name or an IP address).
--  This must run BEFORE the principal's SET PARTNER command.
ALTER DATABASE mojo SET PARTNER ='tcp://sqlsrv1:7024'
GO