
Tuesday, November 18, 2008

Copying SQL Server logins from one server to another

On the old server, copy the following Capture_Logins.sql script to the local hard disk.
/* Capture_Logins.sql ***************************************************************************/
USE master
GO
IF OBJECT_ID ('sp_hexadecimal') IS NOT NULL
DROP PROCEDURE sp_hexadecimal
GO
CREATE PROCEDURE sp_hexadecimal
@binvalue varbinary(256),
@hexvalue varchar(256) OUTPUT
AS
DECLARE @charvalue varchar(256)
DECLARE @i int
DECLARE @length int
DECLARE @hexstring char(16)
SELECT @charvalue = '0x'
SELECT @i = 1
SELECT @length = DATALENGTH (@binvalue)
SELECT @hexstring = '0123456789ABCDEF'
WHILE (@i <= @length)
BEGIN
DECLARE @tempint int
DECLARE @firstint int
DECLARE @secondint int
SELECT @tempint = CONVERT(int, SUBSTRING(@binvalue,@i,1))
SELECT @firstint = FLOOR(@tempint/16)
SELECT @secondint = @tempint - (@firstint*16)
SELECT @charvalue = @charvalue +
SUBSTRING(@hexstring, @firstint+1, 1) +
SUBSTRING(@hexstring, @secondint+1, 1)
SELECT @i = @i + 1
END
SELECT @hexvalue = @charvalue
GO
IF OBJECT_ID ('sp_help_revlogin') IS NOT NULL
DROP PROCEDURE sp_help_revlogin
GO
CREATE PROCEDURE sp_help_revlogin @login_name sysname = NULL AS
DECLARE @name sysname
DECLARE @xstatus int
DECLARE @binpwd varbinary (256)
DECLARE @txtpwd sysname
DECLARE @tmpstr varchar (256)
DECLARE @SID_varbinary varbinary(85)
DECLARE @SID_string varchar(256)
IF (@login_name IS NULL)
DECLARE login_curs CURSOR FOR
SELECT sid, name, xstatus, password FROM master..sysxlogins
WHERE srvid IS NULL AND name <> 'sa'
ELSE
DECLARE login_curs CURSOR FOR
SELECT sid, name, xstatus, password FROM master..sysxlogins
WHERE srvid IS NULL AND name = @login_name
OPEN login_curs
FETCH NEXT FROM login_curs INTO @SID_varbinary, @name, @xstatus, @binpwd
IF (@@fetch_status = -1)
BEGIN
PRINT 'No login(s) found.'
CLOSE login_curs
DEALLOCATE login_curs
RETURN -1
END
SET @tmpstr = '/* sp_help_revlogin script '
PRINT @tmpstr
SET @tmpstr = '** Generated '
+ CONVERT (varchar, GETDATE()) + ' on ' + @@SERVERNAME + ' */'
PRINT @tmpstr
PRINT ''
PRINT 'DECLARE @pwd sysname'
WHILE (@@fetch_status <> -1)
BEGIN
IF (@@fetch_status <> -2)
BEGIN
PRINT ''
SET @tmpstr = '-- Login: ' + @name
PRINT @tmpstr
IF (@xstatus & 4) = 4
BEGIN -- NT authenticated account/group
IF (@xstatus & 1) = 1
BEGIN -- NT login is denied access
SET @tmpstr = 'EXEC master..sp_denylogin ''' + @name + ''''
PRINT @tmpstr
END
ELSE BEGIN -- NT login has access
SET @tmpstr = 'EXEC master..sp_grantlogin ''' + @name + ''''
PRINT @tmpstr
END
END
ELSE BEGIN -- SQL Server authentication
IF (@binpwd IS NOT NULL)
BEGIN -- Non-null password
EXEC sp_hexadecimal @binpwd, @txtpwd OUT
IF (@xstatus & 2048) = 2048
SET @tmpstr = 'SET @pwd = CONVERT (varchar(256), ' + @txtpwd + ')'
ELSE
SET @tmpstr = 'SET @pwd = CONVERT (varbinary(256), ' + @txtpwd + ')'
PRINT @tmpstr
EXEC sp_hexadecimal @SID_varbinary,@SID_string OUT
SET @tmpstr = 'EXEC master..sp_addlogin ''' + @name
+ ''', @pwd, @sid = ' + @SID_string + ', @encryptopt = '
END
ELSE BEGIN
-- Null password
EXEC sp_hexadecimal @SID_varbinary,@SID_string OUT
SET @tmpstr = 'EXEC master..sp_addlogin ''' + @name
+ ''', NULL, @sid = ' + @SID_string + ', @encryptopt = '
END
IF (@xstatus & 2048) = 2048
-- login upgraded from 6.5
SET @tmpstr = @tmpstr + '''skip_encryption_old'''
ELSE
SET @tmpstr = @tmpstr + '''skip_encryption'''
PRINT @tmpstr
END
END
FETCH NEXT FROM login_curs INTO @SID_varbinary, @name, @xstatus, @binpwd
END
CLOSE login_curs
DEALLOCATE login_curs
RETURN 0
GO
IF OBJECT_ID ('seeMigrateSQLLogins') IS NOT NULL
DROP PROCEDURE seeMigrateSQLLogins
GO
create procedure seeMigrateSQLLogins @login_name sysname = NULL
as
declare
@name char(50),
@binpwd varbinary (256),
@txtpwd sysname,
@tmpstr varchar (256),
@SID_varbinary varbinary(85),
@SID_string varchar(256),
@Is_Policy bit,
@Is_Exp bit,
@type char(1),
@Pol char(3),
@Exp char(3)
set nocount on
create table #logins (
[name] nvarchar(128) NOT NULL,
[sid] varbinary(85) NOT NULL,
[type] char(1) NOT NULL,
[is_policy_checked] bit default 0,
[is_expiration_checked] bit default 0,
[password_hash] varbinary(256) )
insert #logins (name, sid, type)
select name, sid, type from sys.server_principals where
(type_desc = 'SQL_LOGIN' or type_desc = 'WINDOWS_LOGIN') and name <> 'sa' and name <> 'NT AUTHORITY\SYSTEM'
update a set a.is_policy_checked = b.is_policy_checked, a.is_expiration_checked = b.is_expiration_checked, a.password_hash = b.password_hash
from #logins a, sys.sql_Logins b
where a.sid = b.sid
set nocount off
IF (@login_name IS NULL) --Not a single user, get the list
DECLARE seelogin_curs CURSOR FOR
SELECT name, sid, password_hash, type, is_policy_checked, is_expiration_checked FROM #logins
WHERE name <> 'sa'
ELSE
DECLARE seelogin_curs CURSOR FOR
SELECT name, sid, password_hash, type, is_policy_checked, is_expiration_checked FROM #logins
WHERE name = @login_name
OPEN seelogin_curs
FETCH NEXT FROM seelogin_curs INTO @name, @SID_varbinary, @binpwd, @type, @Is_Policy, @Is_Exp
IF (@@fetch_status = -1)
BEGIN
PRINT 'No login(s) found.'
CLOSE seelogin_curs
DEALLOCATE seelogin_curs
END
SET @tmpstr = '/* seeMigrateSQLLogins - For SQL Server 2005 Only '
PRINT @tmpstr
SET @tmpstr = '** Generated '
+ CONVERT (varchar, GETDATE()) + ' on ' + @@SERVERNAME + ' */'
PRINT @tmpstr
PRINT ''
WHILE (@@fetch_status <> -1)
BEGIN
IF @type = 'S'
BEGIN
PRINT '/* SQL Login ******************/'
EXEC sp_hexadecimal @binpwd, @txtpwd OUT
EXEC sp_hexadecimal @SID_varbinary, @SID_string OUT
IF @Is_Policy = 1 SET @Pol = 'ON' ELSE SET @Pol = 'OFF'
IF @Is_Exp = 1 SET @Exp = 'ON' ELSE SET @Exp = 'OFF'
SET @tmpstr = 'Create Login [' + rtrim(@name) + '] WITH PASSWORD = ' + @txtpwd + ' hashed, sid = ' + @SID_string + ', CHECK_POLICY = ' + @Pol + ', CHECK_EXPIRATION = ' + @Exp
PRINT @tmpstr
PRINT ''
END
Else
BEGIN
PRINT '/* Windows Login **************/'
EXEC sp_hexadecimal @SID_varbinary, @SID_string OUT
SET @tmpstr = 'Create Login [' + rtrim(@name) + '] FROM WINDOWS; '
PRINT @tmpstr
PRINT ''
END
FETCH NEXT FROM seelogin_curs INTO @name, @SID_varbinary, @binpwd, @type, @Is_Policy, @Is_Exp
END
CLOSE seelogin_curs
DEALLOCATE seelogin_curs
drop table #logins
GO

declare
@version2005 char(5)
declare
@version2008 char(5)

--Get the current version of SQL Server running
select @version2005 = substring(@@version,29,4)
select @version2008 = substring(@@version,35,4)

if @version2005 = '9.00'
Begin
exec seeMigrateSQLLogins
End
Else if @version2008 = '10.0'
Begin
exec seeMigrateSQLLogins
End
Else
begin
exec sp_help_revlogin
End
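
Note: parsing @@version by fixed character positions, as above, is fragile across builds and editions. If the positional check misfires on your server, SERVERPROPERTY returns the version directly; a minimal alternative:

-- ProductVersion returns, e.g., '9.00.3042.00' or '10.0.1600.22'
select serverproperty('ProductVersion')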

IF OBJECT_ID ('sp_hexadecimal') IS NOT NULL
DROP PROCEDURE sp_hexadecimal
GO
IF OBJECT_ID ('sp_help_revlogin') IS NOT NULL
DROP PROCEDURE sp_help_revlogin
GO
IF OBJECT_ID ('seeMigrateSQLLogins') IS NOT NULL
DROP PROCEDURE seeMigrateSQLLogins
GO
/* End Script */

On the old server, run the Capture_Logins.sql script to capture all SQL Server logins and password information. All SQL Server logins that use the SQL Server installation on the old server will be captured. Follow these steps, based on the SQL Server tools that you use:
· If you use SQL Server Management Studio, follow these steps:
a. Click Start, point to All Programs, point to Microsoft SQL Server 2005, and then click SQL Server Management Studio.
b. In the Connect to Server window, follow these steps:
1. In the Server name box, type the name of the server that is running SQL Server.
2. In the Authentication box, click SQL Authentication.
3. In the Login box, type sa.
4. In the Password box, type the password for the sa user, and then click Connect.
c. Click File, point to Open, and then click File.
d. In the Look In list, click the Capture_Logins.sql script that you copied to the local hard disk in step 1, and then click Open.
e. In the Connect to Database Engine window, follow these steps:
1. In the Server name box, type the name of the old server that is running SQL Server.
2. In the Authentication box, click SQL Authentication.
3. In the Login box, type sa.
4. In the Password box, type the password for the sa user, and then click Connect.
f. Click Query, point to Results to, and then click Results to File.
g. Click Query, and then click Execute.
h. In the Save Results window, follow these steps:
1. In the Save in list, click the location where you want to save the results of the script.
2. In the File name box, type SQLLOGINS.sql, and then click Save.

· If you use Query Analyzer, follow these steps:
a. Click Start, point to All Programs, point to Microsoft SQL Server, and then click Query Analyzer.
b. In the Connect to SQL Server window, follow these steps:
1. In the SQL Server box, type the name of the old server that is running SQL Server.
2. In the Connect using area, click SQL Server Authentication.
3. In the Login name box, type sa.
4. In the Password box, type the password for the sa user, and then click OK.
c. Click File, and then click Open.
d. In the Open Query File window, in the Look In list, click the Capture_Logins.sql script that you copied to the local hard disk of the old server in step 1, and then click Open.
e. Click Query, and then click Results to File.
f. Click Query, and then click Execute.
g. In the Save Results window, follow these steps:
1. In the Save in list, click the location where you want to save the results of the script.
2. In the File name box, type SQLLOGINS.sql, and then click Save.

Monday, July 14, 2008

Repair a Database with a deleted Transaction log

Your network team says, "Hey, this client ran out of disk space, so we deleted the transaction log for the database as it was over 100GB". Now the problem was that the applications started failing. I ran into a few hints in BOL and then other hints on the Internet. After all the research, I determined the best option would be to use DBCC CHECKDB with the REPAIR_ALLOW_DATA_LOSS option. BOL stated this option could repair a transaction log. Well, what could be more broken about a transaction log than not having one at all?

Using the following method, you can have the log file recreated.
/*
1. Stop the SQL Server service.
2. Rename DbLive.mdf to DbBad.mdf.
3. Start the SQL Server service and create a fake DbLive database (with a log file, etc.).
4. Stop the SQL Server service.
5. Delete the new DbLive.mdf.
6. Rename DbBad.mdf to DbLive.mdf.
7. Start the SQL Server service.
8. Run the following script:
*/
ALTER DATABASE DbLive SET EMERGENCY
GO
EXEC sp_dboption 'DbLive', 'single user', 'true'
GO
DBCC CHECKDB ('DbLive', REPAIR_ALLOW_DATA_LOSS)
GO
EXEC sp_dboption 'DbLive', 'single user', 'false'
GO

This should solve the issue.
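
Once the repair completes, it is worth verifying the database before handing it back to the applications. A minimal check, assuming the database is named DbLive as above (the SET ONLINE step only applies if the database is still flagged EMERGENCY):

-- Re-check the repaired database; any remaining corruption will be reported
DBCC CHECKDB ('DbLive') WITH NO_INFOMSGS
GO
-- Bring the database back to normal availability if it is still in EMERGENCY mode
ALTER DATABASE DbLive SET ONLINE
GO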

Monday, June 30, 2008

Calculating the age of an individual

How do you calculate the age of an individual? This can become an extremely hot topic, and the number of solutions provided can be numerous. Before attacking this "controversial" topic, it would actually help to set some ground rules to be used in designing an aging algorithm.
The most difficult part of this, of course, is figuring out what to do with leap years. When does a person (or object or document, for that matter) become another year older? For the purposes of this short article, I propose that this occurs on February 28th of non-leap years.
Some Basic Date Arithmetic, SQL Style

Before diving into the aging calculation, let's look at some very simple date arithmetic using SQL Server 2005.


First, let’s see what adding 1 year to February 28, 2008 and February 29, 2008 returns.

declare @date1 datetime,
@date2 datetime
set @date1 = '20080228'
set @date2 = '20080229'
select dateadd(yy, 1, @date1), dateadd(yy, 1, @date2)
Result:
2009-02-28 00:00:00.000 2009-02-28 00:00:00.000


Interesting, adding 1 year to both dates results in the same date. Let’s see what happens when you now add 4 years.
declare @date1 datetime,
@date2 datetime
set @date1 = '20080228'
set @date2 = '20080229'
select dateadd(yy, 4, @date1), dateadd(yy, 4, @date2)
Result:
2012-02-28 00:00:00.000 2012-02-29 00:00:00.000


Now that is interesting: this time the two results aren't the same. This is what would be expected.
Now, let’s look at DATEDIFF and see how it works a little.
declare @date1 datetime,
@date2 datetime
set @date1 = '20080229'
set @date2 = '20090228'
select datediff(yy, @date1, @date2)
Result: 1

But wait, what is the result of the following?
declare @date1 datetime,
@date2 datetime
set @date1 = '20081231'
set @date2 = '20090101'
select datediff(yy, @date1, @date2)
Result: 1

Wait! That isn't right; there isn't a year between those two dates, so something must be wrong. As you can see, the DATEDIFF function returned the difference between the year values of the two dates, not the actual number of years between the two dates.
We can use this information to create a simple algorithm:
Age = datediff(yy, date_of_birth, Today)
- (if Today < dateadd(yy, datediff(yy, date_of_birth, Today), date_of_birth)
then 1 else 0)
Or, to put it in to T-SQL:
declare @dob datetime,
@age int,
@day datetime
set @day = '2008-02-28'
set @dob = '2007-03-01'
set @age = datediff(yy, @dob, @day) -
case
when @day < dateadd(yy, datediff(yy, @dob, @day), @dob)
then 1
else 0
end
select @age

Conclusion
This is just one method to use to calculate the age of person in code. There are numerous other methods that can also be used. All I can suggest, is use the method that works best for you in your situation.
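
For example, one widely circulated alternative (shown here only as an illustration, not as part of the method above) converts both dates to integer YYYYMMDD form and divides the difference by 10000. Note that it ages a February 29th birthday on March 1st rather than February 28th, so it applies a slightly different leap-year rule than the one proposed earlier:

declare @dob datetime, @day datetime
set @dob = '2007-03-01'
set @day = '2008-02-28'
-- Style 112 yields YYYYMMDD; integer division by 10000 discards the
-- month/day digits, leaving the number of whole years elapsed.
select (convert(int, convert(char(8), @day, 112))
- convert(int, convert(char(8), @dob, 112))) / 10000 as age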

Thursday, June 19, 2008

Scripting Traces for SQL 2005

I wrote this with the assumption that you will be running traces around job steps, but you can adapt it for other situations. I used the default traces for both SQL and AS, but removed most of the Events from both trace definitions in this write-up just for brevity; you can generate the full definitions from Profiler by following the steps below. I have sections for SQL Server and Analysis Services.

SQL Trace

1. Create the Trace you want in SQL Profiler, on a SQL connection, setting a Trace Name (I used 'OLAP Error SQL Data'), Save To File with a file name and a file size large enough to hold the data in a single file (I used 32MB), and the Events / Data fields you want to trace. Run the Trace.

2. Stop the Trace - we don't actually want to collect anything at this point.

3. Export the trace definition - select File > Export > Script Trace Definition > For SQL Server 2005.... Export the definition to a .SQL file of your choice.

4. Close Profiler; we're done with it at this point. In the SQL job with the step you want to trace, you're going to add a couple of steps before the step you want to trace...

5. Add the following code to a T-SQL job step, named 'Delete SQL Trace File', before the step you want to trace (this step is just to make sure there isn't a file from a previous run, just in case):
DECLARE @OS_Cmd VARCHAR(1024)
SELECT @OS_Cmd = 'DEL /Q /F "C:\Trace Files\OLAP Error SQL Data.trc"'
EXECUTE master.dbo.xp_cmdshell @OS_Cmd

NOTE - you will get an error code 12 from the sp_trace_create call in the next step if the output file already exists.


6. Add the code from the SQL file you just created to a T-SQL job step, named 'Start SQL Trace', immediately after the 'Delete SQL Trace File' step. Replace the literal 'InsertFileNameHere' with the destination you want for the trace file (don't add .trc to the end of the file name - it will automatically get added for you). It should look like this:

-- Create a Queue
declare @rc int
declare @TraceID int
declare @maxfilesize bigint
set @maxfilesize = 32
-- Please replace the text InsertFileNameHere, with an appropriate
--filename prefixed by a path, e.g., c:\MyFolder\MyTrace. The .trc extension
--will be appended to the filename automatically. If you are writing from
-- remote server to local drive, please use UNC path and make sure server has
-- write access to your network share
exec @rc = sp_trace_create @TraceID output, 0, N'C:\Trace Files\OLAP Error SQL Data', @maxfilesize, NULL
if (@rc != 0) goto error
-- Client side File and Table cannot be scripted
-- Set the events
declare @on bit
set @on = 1
exec sp_trace_setevent @TraceID, 14, 1, @on
exec sp_trace_setevent @TraceID, 14, 9, @on
exec sp_trace_setevent @TraceID, 14, 6, @on
exec sp_trace_setevent @TraceID, 14, 10, @on
exec sp_trace_setevent @TraceID, 14, 14, @on
exec sp_trace_setevent @TraceID, 14, 11, @on
exec sp_trace_setevent @TraceID, 14, 12, @on
-- Set the Filters
declare @intfilter int
declare @bigintfilter bigint
exec sp_trace_setfilter @TraceID, 10, 0, 7, N'SQL Server Profiler - 1745445d-46a5-4050-9922-16caf3851690'
-- Set the trace status to start
exec sp_trace_setstatus @TraceID, 1
-- display trace id for future references
select TraceID = @TraceID
goto finish
error:
select ErrorCode = @rc
finish:
Now you're going to add a couple of steps after the step you want to trace...

7. You need to stop the trace. Add this code to a T-SQL step named 'Stop SQL Trace'. Note that the file name in the WHERE clause must match the file name you specified above in the 'Start SQL Trace' step, with '.trc' appended to the end:
DECLARE @TraceID INT
SELECT @TraceID = TraceID
FROM ::fn_trace_getinfo(0)
WHERE CAST([value] AS VARCHAR(256)) = 'C:\Trace Files\OLAP Error SQL Data.trc'
IF @TraceID IS NOT NULL

BEGIN
PRINT 'Closing Trace ID ' + CAST (@TraceID AS VARCHAR(5))
EXEC sp_trace_setstatus @TraceID, 0 -- status 0 stops the trace
EXEC sp_trace_setstatus @TraceID, 2 -- status 2 closes the trace and deletes its definition
END

10 Steps to Secure SQL Server

1. KEEP SYSTEMS UP TO DATE ON SERVICE PACKS, PATCHES, AND NEW VERSIONS OF THE OS AND SQL SERVER.
Tools to help with this: Idera SQLsecure is a tool for assessing security across your SQL Servers. SQLsecure collects SQL Server version number and service pack as part of the overall security assessment it performs. The version number returned by SQLsecure will also reflect the latest patches applied, so you can determine which servers are out of date.

2. ENFORCE STANDARDS FOR SECURE PASSWORDS.
Tools to help with this: If you are using SQL Server 2005, use the built-in security options to enforce password complexity standards. For previous versions of SQL Server, there are many publicly available scripts you can use to identify accounts with weak or non-existent passwords.
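
As a rough illustration of what such a script does, the following check (a minimal sketch for SQL Server 2005; PWDCOMPARE hashes a candidate string and compares it to the stored hash) flags logins with a blank password or a password equal to the login name:

-- Flag SQL logins with blank passwords or passwords matching the login name
SELECT name
FROM sys.sql_logins
WHERE PWDCOMPARE('', password_hash) = 1
OR PWDCOMPARE(name, password_hash) = 1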

3. SECURE THE FULL ENVIRONMENT, INCLUDING THE OS AND NETWORK
Tools to help with this: Microsoft has many documents and knowledge base articles that provide more detail and best practices on how to ensure server and application security.

4. ENSURE APPROPRIATE SQL SERVER SETUP
Tools to help with this: Idera SQLsecure checks most of these settings across your SQL Servers and reports back where you have potential security concerns.

5. REGULARLY ASSESS WHO CAN ACCESS YOUR SQL SERVERS AND TAKE ACTION TO LIMIT ACCESS.
Tools to help with this: Idera SQLsecure can make this task a whole lot easier for you. SQLsecure automatically generates a complete list of all users with access to your SQL Server, across Windows, Active Directory, and SQL Server, including users that have access as a result of group membership.
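
Even without a tool, a quick inventory of who can reach the instance is one query away on SQL Server 2005; a minimal sketch (Windows group members still have to be resolved in Active Directory):

-- List every server principal that can potentially log in
SELECT name, type_desc, is_disabled, create_date
FROM sys.server_principals
WHERE type IN ('S', 'U', 'G') -- SQL logins, Windows logins, Windows groups
ORDER BY type_desc, name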

6. ASSESS WHAT ACTIONS USERS CAN PERFORM ON WHAT OBJECTS
Tools to help with this: Accurately assessing effective permissions on SQL Server objects is really, really, hard to do manually. Idera SQLsecure can perform this time consuming task for you. SQLsecure calculates both effective (derived) and assigned permissions for all users, on any database object, making it much easier to ensure that users have appropriate access rights.
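
For one-off spot checks, SQL Server 2005 can report effective permissions natively via fn_my_permissions; a small sketch (dbo.Orders and SomeLogin are placeholders, not names from this article):

-- Effective permissions of the current login on a single object
SELECT permission_name FROM fn_my_permissions('dbo.Orders', 'OBJECT')
-- Or impersonate another login to see its server-level permissions
EXECUTE AS LOGIN = 'SomeLogin'
SELECT permission_name FROM fn_my_permissions(NULL, 'SERVER')
REVERT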

7. KEEP AN AUDIT TRAIL OF DATABASE ACTIVITY.
Tools to help with this: SQL Server does provide native C2 auditing to help with gathering this information. However, C2 auditing can be very resource intensive and does not provide fine-grained controls to let you specify what you want to collect. You may instead want to use a 3rd party auditing tool, such as Idera SQL compliance manager. SQL compliance manager provides customizable, low-impact auditing, alerting and reporting on virtually all activity across multiple SQL servers.

8. AUDIT THE ACTIONS OF ADMIN USERS ON YOUR SQL SERVERS
Tools to help with this: The native SQL Server C2 auditing can also be used for this purpose, however, there is no way to ensure an immutable trail of audit data with the native tools. So, what’s to stop an admin user from deleting the audit trail after making off with your customer list? A better solution is to invest in a 3rd party tool such as SQL compliance manager which can audit ALL admin user activity, and protect the audit trail from tampering by anyone, even admin users.

9. SECURE AND AUDIT THE APPLICATIONS THAT ACCESS YOUR SQL SERVER DATABASES
Tools to help with this: Idera SQL compliance manager will track the activity of any application within a targeted database. And, alerts can be configured to watch for and provide immediate notification of questionable behavior, such as a business application accessing a database it shouldn’t.

10. ENSURE PROPER DATABASE BACKUPS, AND SECURE YOUR BACKUPS
Tools to help with this: While SQL Server does give you the ability to back up your databases, it does not include the ability to encrypt the backups, or compress them to help save space and reduce backup time. Idera SQLsafe provides a high-performance backup solution which includes encryption and compression. SQLsafe also lets you create backup policies with exception reporting to notify you of any backups that failed to run as scheduled.

Wednesday, June 18, 2008

Automate Audit Requests

Periodically an internal auditor would stop by my desk and ask me to show them all the users and permissions on a particular database. This is easily gathered, and most of us know how to do this. I produce this list and send it on to the requestor. The next question is 'What has changed?'. I do not have this information, and have to tell them I do not know. So, after a couple of iterations of this (I will not admit how many) I finally devised a simple way to store this information to adequately respond to this question. A snapshot of the user information is gathered from time to time and stored into a history table. The next time a request comes in, I can compare the current values to the historical ones. This would become tedious, as it usually involved some spreadsheets or query results, and manually reviewing the data, looking for new records, changed records and so on. When I would produce these two sets of data (current and one historical snapshot) and give the data back to the auditor, they were initially happy, until they realized all the time that would be involved to perform an adequate review of the two sets of data. The next question would invariably be: is there a way to automate this? There is always a way to automate anything, I would respond, and skulk back to my DBA hole and pound out some more code. After more review of the results I produced, and even more back and forth between me and the auditor, we finally decided which fields we needed to see, the differences we should show, etc. I will now try to explain the system that we devised to assist us in this simple, yet complex endeavor.

System

We basically want to know what the users looked like at a point in time, compare that to the current state of the users, and show the differences. Differences will be defined as: new records, changed records, removed records and old records. We want to be able to dynamically include any number of new and existing servers into this system, though we started with a single server. We want to be able to display these results for a period of time, and allow reporting to occur based on the results. Since we already have a monitoring server set up, this was the perfect place to locate this system. We already use linked servers to connect to and monitor all our remote servers, so we will continue in this vein for this system. Justifications can be read in other articles I've written about monitoring with linked servers. A configuration table will indicate which servers we monitor. Some stored procedures will use dynamic SQL to cycle through the config list of servers, call them to gather data, stage the data, and then do comparisons against this data. The resulting data can then be reported on. That's the high-level description. I will now go through all the objects in more detail.

Tables

DatabaseRolesConfig
This table will contain a ServerName and an Enabled flag. This allows us to set up multiple servers to be monitored, and enable and disable them at whim. The fields for this table are as follows:

ServerName sysname not null,
Enabled bit not null default 0

DatabaseRolesStaging
This table will allow us to store data we have pulled down from all the remote servers. Once staged, we can query it and process it as we desire. The fields in this table are as follows:
Type sysname null default '',
ServerName sysname null default '',
DBName sysname null default '',
LoginName sysname null default '',
UserName sysname null default '',
db_owner varchar(3) null default '',
db_accessadmin varchar(3) null default '',
db_securityadmin varchar(3) null default '',
db_ddladmin varchar(3) null default '',
db_datareader varchar(3) null default '',
db_datawriter varchar(3) null default '',
db_denydatareader varchar(3) null default '',
db_denydatawriter varchar(3) null default '',
denylogin int null default '',
hasaccess int null default '',
isntname int null default '',
isntgroup int null default '',
isntuser int null default '',
sysadmin int null default '',
securityadmin int null default '',
serveradmin int null default '',
setupadmin int null default '',
processadmin int null default '',
diskadmin int null default '',
dbcreator int null default '',
bulkadmin int null default ''

All the fields accept a default of blank, for reporting purposes. Not all fields for every row will have a value. This eliminates the display of null values in the report. This is just a simple step I chose to make life easy.

DatabaseRolesHistory
This table is identical to the staging table, and will hold the processed data from the last execution for comparison to the next execution.
DatabaseRolesArchive
This table is identical to the staging table, except that it has an added identity field for uniqueness, and a [Date] field that will contain the date of the archived data. This will be a holding area for all the data processed and displayed from past executions. The extra fields are as follows:

ID integer not NULL IDENTITY(1,1),
[Date] datetime not null default getdate(),

DatabaseRolesDisplay
This table is similar to the above two tables, with an added identity field for uniqueness and a Version field to keep track of the previous version (Type). This will be the table that contains the processed data, grouped by type and cleaned up. We will report from this table, as it will hold the result set after processing. The extra fields of this table are as follows:

ID integer not NULL IDENTITY(1,1),
Version VARCHAR(8) not null,
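
Pulling the field lists together, a minimal sketch of the config and staging tables (the history, archive and display tables follow the same pattern, adding the extra columns noted above):

CREATE TABLE DatabaseRolesConfig (
ServerName sysname NOT NULL,
Enabled bit NOT NULL DEFAULT 0
)

CREATE TABLE DatabaseRolesStaging (
[Type] sysname NULL DEFAULT '',
ServerName sysname NULL DEFAULT '',
DBName sysname NULL DEFAULT '',
LoginName sysname NULL DEFAULT '',
UserName sysname NULL DEFAULT '',
db_owner varchar(3) NULL DEFAULT '',
-- ... remaining database-role and server-role columns as listed above ...
bulkadmin int NULL DEFAULT ''
)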

Stored Procedures
sp_GetDBRoles
This procedure is passed a ServerName, a DatabaseName and a UserName. The last two params were never implemented, but the ServerName determines which server we will be pulling data from. We use dynamic SQL to pull information from syslogins on the remote server, and then store it in the DatabaseRolesStaging table, with a ServerName and Type included. Then we create a cursor that cycles through each database in sysdatabases, except a few. Inside the cursor, we dynamically call SQL that pulls more data from sysmembers, sysusers and syslogins, retrieving those users that have roles set; this data is stored in the staging table as well. We next use dynamic SQL to pull more data from sysusers and syslogins, retrieving those users that do not have roles set. This data is also stored in the staging table. A sketch of the linked-server pull appears below.
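
A hypothetical sketch of the core of that dynamic pull (REMOTESRV is a placeholder linked server name; the real procedure builds the string from its @ServerName parameter):

DECLARE @ServerName sysname, @sql nvarchar(4000)
SET @ServerName = 'REMOTESRV'
SET @sql = N'INSERT INTO DatabaseRolesStaging
(Type, ServerName, LoginName, denylogin, hasaccess, isntname, isntgroup,
isntuser, sysadmin, securityadmin, serveradmin, setupadmin, processadmin,
diskadmin, dbcreator, bulkadmin)
SELECT ''Login'', ''' + @ServerName + ''', loginname, denylogin, hasaccess,
isntname, isntgroup, isntuser, sysadmin, securityadmin, serveradmin,
setupadmin, processadmin, diskadmin, dbcreator, bulkadmin
FROM [' + @ServerName + '].master.dbo.syslogins'
EXEC (@sql)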

sp_GetAllDBRoles
This will cycle through the config table DatabaseRolesConfig and call the sp_GetDBRoles proc for each server that is enabled to be processed.

sp_ProduceDatabaseRolesResults
Since we have previously gathered records into the DatabaseRolesStaging table, we can now compare these results to some other tables of historical data. We have a history table called DatabaseRolesHistory that contains the last set of data we gathered about Users.

1. We select the new values, and insert them into a memory table @DatabaseRolesStaging with a flag indicating they are ‘new’.
2. We select the old values that have changed, and insert them into the table @DatabaseRolesStaging with a flag indicating they are ‘old’.
3. We update the 'new' values that have changed. We determine this when there is a new record and an old one sharing the same key values (ServerName, DBName, UserName and LoginName). These are updated in the table @DatabaseRolesStaging with a flag indicating they are 'changed'.
4. We find the records that were removed from the DatabaseRolesHistory table, compared to the DatabaseRolesStaging table. These records are inserted into the table @DatabaseRolesStaging with a flag indicating they are ‘removed’.
5. Records that were simply altered are removed from the memory table @DatabaseRolesStaging.
This resulting data is now labeled and ready to display to the requestor. We process the data from the @DatabaseRolesStaging table, and order it with 'Old' and 'Changed' first, then the 'Removed' records, followed by the 'New' records. This just helps in viewing the data, with the important ones first. The resulting data is dumped into a real table called DatabaseRolesDisplay, and will live there until the next execution of this process. This allows me to reselect from this data when needed between executions. I used to have this simply returned once as part of the proc call, but would tend to need to look at the data subsequently; this solves that need. A sketch of the 'new' and 'removed' detection follows.
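
A hypothetical shape of that detection, keyed on server/database/user/login (@Work stands in for the memory table, with its role columns elided for brevity):

DECLARE @Work TABLE (
Version varchar(8),
ServerName sysname,
DBName sysname,
UserName sysname,
LoginName sysname
-- ... role columns as in DatabaseRolesStaging ...
)

-- Rows present in staging but absent from history are 'New'
INSERT INTO @Work (Version, ServerName, DBName, UserName, LoginName)
SELECT 'New', s.ServerName, s.DBName, s.UserName, s.LoginName
FROM DatabaseRolesStaging s
WHERE NOT EXISTS (SELECT 1 FROM DatabaseRolesHistory h
WHERE h.ServerName = s.ServerName AND h.DBName = s.DBName
AND h.UserName = s.UserName AND h.LoginName = s.LoginName)

-- Rows present in history but gone from staging are 'Removed'
INSERT INTO @Work (Version, ServerName, DBName, UserName, LoginName)
SELECT 'Removed', h.ServerName, h.DBName, h.UserName, h.LoginName
FROM DatabaseRolesHistory h
WHERE NOT EXISTS (SELECT 1 FROM DatabaseRolesStaging s
WHERE s.ServerName = h.ServerName AND s.DBName = h.DBName
AND s.UserName = h.UserName AND s.LoginName = h.LoginName)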
sp_ProcessDatabaseRoles
This stored procedure takes a ServerName as a parameter. If you use this option, it will call the procedure sp_GetDBRoles for just that ServerName. If you leave the ServerName blank, it will call sp_GetAllDBRoles and process all enabled ServerNames from the config table. This proc will then get the user data into the staging table as described above. It will then call the sp_ProduceDatabaseRolesResults procedure, which will process the data in the staging table, comparing it to the historical data. Then the data in the history table will be pumped into the Archive table, the history table will be truncated, and the current staging data will be pumped into the history table, where it awaits the next execution.

This is the gem of the system, taking all the other parts into account and doing it all for you. It can be called singly when the auditor requests it, or you can schedule it to run as a job and simply query the resulting data in the DatabaseRolesDisplay table. There are many options you can now follow, depending on your own needs.

This system will allow you to gather user information, stage it, and store it historically, giving you the chance to see back into the past at a snapshot of what once was. No more will you be stymied by auditors or others with the questions of what users we have in the system and how they compare to a year ago. You have the data, you are empowered, you are the DBA, in control and informed. I hope that this system will help you gather the needed data and have it on hand to help out with your user reviews.

Tuesday, May 13, 2008

How to search all columns of all tables in a database for a keyword?

While browsing the SQL Server newsgroups, every once in a while I see a request for a script that can search all the columns of all the tables in a given database for a specific keyword. I had no such script handy at the time, so we ended up searching manually. That's when I really felt the need for such a script and came up with this stored procedure, "SearchAllTables". It accepts a search string as an input parameter, then searches all char, varchar, nchar and nvarchar columns of all tables (only user-created tables; system tables are excluded), owned by all users in the current database. Feel free to extend this procedure to search other datatypes. The output of this stored procedure contains two columns:
1) The table name and column name in which the search string was found
2) The actual content/value of the column (only the first 3630 characters are displayed)

Here's a word of caution before you go ahead and run this procedure. Though this procedure is quite quick on smaller databases, it could take hours to complete on a large database with many character columns and a huge number of rows. So, if you are trying to run it on a large database, be prepared to wait (I did use the locking hint NOLOCK to reduce locking). The Full-Text Search feature is more efficient for free-text searching, but it doesn't make sense for this type of ad hoc requirement.

Create this procedure in the required database; here is how you run it:

--To search all columns of all tables in the Pubs database for the keyword "Computer"
EXEC SearchAllTables 'Computer'
GO

Here is the complete stored procedure code:
CREATE PROC SearchAllTables
(
@SearchStr nvarchar(100)
)
AS
BEGIN
CREATE TABLE #Results (ColumnName nvarchar(370), ColumnValue nvarchar(3630))
SET NOCOUNT ON
DECLARE @TableName nvarchar(256), @ColumnName nvarchar(128), @SearchStr2 nvarchar(110)
SET @TableName = ''
SET @SearchStr2 = QUOTENAME('%' + @SearchStr + '%','''')
WHILE @TableName IS NOT NULL
BEGIN
SET @ColumnName = ''
SET @TableName =
(
SELECT MIN(QUOTENAME(TABLE_SCHEMA) + '.' + QUOTENAME(TABLE_NAME))
FROM INFORMATION_SCHEMA.TABLES
WHERE TABLE_TYPE = 'BASE TABLE'
AND QUOTENAME(TABLE_SCHEMA) + '.' + QUOTENAME(TABLE_NAME) > @TableName
AND OBJECTPROPERTY(
OBJECT_ID(
QUOTENAME(TABLE_SCHEMA) + '.' + QUOTENAME(TABLE_NAME)
), 'IsMSShipped'
) = 0
)
WHILE (@TableName IS NOT NULL) AND (@ColumnName IS NOT NULL)
BEGIN
SET @ColumnName =
(
SELECT MIN(QUOTENAME(COLUMN_NAME))
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_SCHEMA = PARSENAME(@TableName, 2)
AND TABLE_NAME = PARSENAME(@TableName, 1)
AND DATA_TYPE IN ('char', 'varchar', 'nchar', 'nvarchar')
AND QUOTENAME(COLUMN_NAME) > @ColumnName
)

IF @ColumnName IS NOT NULL
BEGIN
INSERT INTO #Results
EXEC
(
'SELECT ''' + @TableName + '.' + @ColumnName + ''', LEFT(' + @ColumnName + ', 3630)
FROM ' + @TableName + ' (NOLOCK) ' +
' WHERE ' + @ColumnName + ' LIKE ' + @SearchStr2
)
END
END
END
SELECT ColumnName, ColumnValue FROM #Results
END