I'm creating a program to generate my database schema using the SMO libraries distributed with SQL Server 2008.
I've gotten the Scripter to output code that is virtually the same as what SQL Server Management Studio outputs when it's configured to script everything, with one curious exception: it doesn't output comment headers for the foreign key constraints it generates at the bottom, whereas SSMS does. Can anyone figure out why this is? Here's my code:
private void btnExportScript_Click(object sender, EventArgs ea) {
Server srv = setupConnection();
// Reference the database
if (!srv.Databases.Contains(cbChooseDb.SelectedItem.ToString())) {
_utils.ShowError("Couldn't find DB '" + cbChooseDb.SelectedItem.ToString() + "'.");
return;
}
Database db = srv.Databases[cbChooseDb.SelectedItem.ToString()];
StringBuilder builder = new StringBuilder();
try {
Scripter scrp = new Scripter(srv);
scrp.Options.AppendToFile = false;
scrp.Options.ToFileOnly = false;
scrp.Options.ScriptDrops = false; // Don't script DROPs
scrp.Options.Indexes = true; // Include indexes
scrp.Options.DriAllConstraints = true; // Include referential constraints in the script
scrp.Options.Triggers = true; // Include triggers
scrp.Options.FullTextIndexes = true; // Include full text indexes
scrp.Options.NonClusteredIndexes = true; // Include non-clustered indexes
scrp.Options.NoCollation = false; // Include collation
scrp.Options.Bindings = true; // Include bindings
scrp.Options.SchemaQualify = true; // Include schema qualification, eg. [dbo]
scrp.Options.IncludeDatabaseContext = false;
scrp.Options.AnsiPadding = true;
scrp.Options.FullTextStopLists = true;
scrp.Options.IncludeIfNotExists = false;
scrp.Options.ScriptBatchTerminator = true;
scrp.Options.ExtendedProperties = true;
scrp.Options.ClusteredIndexes = true;
scrp.Options.FullTextCatalogs = true;
scrp.Options.SchemaQualifyForeignKeysReferences = true;
scrp.Options.XmlIndexes = true;
scrp.Options.IncludeHeaders = true;
// Prefetching may speed things up
scrp.PrefetchObjects = true;
var urns = new List<Urn>();
// Iterate through the tables in database and script each one.
foreach (Table tb in db.Tables) {
if (tb.IsSystemObject == false) {
// Table is not a system object, so add it.
urns.Add(tb.Urn);
}
}
// Iterate through the views in database and script each one. Display the script.
foreach (Microsoft.SqlServer.Management.Smo.View view in db.Views) {
if (view.IsSystemObject == false) {
// View is not a system object, so add it.
urns.Add(view.Urn);
}
}
// Iterate through the stored procedures in database and script each one. Display the script.
foreach (StoredProcedure sp in db.StoredProcedures) {
if (sp.IsSystemObject == false) {
// Procedure is not a system object, so add it.
urns.Add(sp.Urn);
}
}
// Start by manually adding DB context
builder.AppendLine("USE [" + db.Name + "]");
builder.AppendLine("GO");
System.Collections.Specialized.StringCollection sc = scrp.Script(urns.ToArray());
foreach (string st in sc) {
// It seems each string is a sensible batch, and putting GO after it makes it work in tools like SSMS.
// Wrapping each string in an 'exec' statement would work better if using SqlCommand to run the script.
builder.Append(st.Trim(new char[] { '\r', '\n' }) + "\r\nGO\r\n");
}
}
catch (Exception ex) {
showExceptionError("Couldn't generate script.", ex);
return;
}
try {
File.WriteAllText(txtExportToFile.Text, builder.ToString());
_utils.ShowInfo("DB exported to script at: " + txtExportToFile.Text);
}
catch (Exception ex) {
showExceptionError("Couldn't save script file.", ex);
return;
}
}
Note that foreign keys fall under the category of DRI constraints, and are scripted because scrp.Options.DriAllConstraints is set to true.
I have a solution here: Can't get EnumScript() to generate constraints
For some reason, Scripter won't emit DRI constraints (foreign keys, etc.) when it's given a list of Urns, but it will if it's given one Urn at a time. The trick is to pass the Urns in the order of their ancestry: tables have to be defined before they're referenced by constraints. To do that, I've used the DependencyWalker.
Here's a synopsis:
// Note: schemaOptions, insertOptions and scriptData are configured elsewhere;
// 'strings' is the List<string> that collects the generated script.
var urns = new List<Urn>();
var strings = new List<string>();
Scripter schemaScripter = new Scripter(srv) { Options = schemaOptions };
Scripter insertScripter = new Scripter(srv) { Options = insertOptions };
var dw = new DependencyWalker(srv);
foreach (Table t in db.Tables)
if (t.IsSystemObject == false)
urns.Add(t.Urn);
DependencyTree dTree = dw.DiscoverDependencies(urns.ToArray(), true);
DependencyCollection dColl = dw.WalkDependencies(dTree);
foreach (var d in dColl)
{
foreach (var s in schemaScripter.Script(new Urn[] { d.Urn }))
strings.Add(s);
strings.Add("GO");
if (scriptData)
{
int n = 0;
foreach (var i in insertScripter.EnumScript(new Urn[] {d.Urn}))
{
strings.Add(i);
if ((++n) % 100 == 0)
strings.Add("GO");
}
}
}
Note: adding a "GO" every so often keeps the batch size small so SSMS doesn't run out of memory.
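As the comments in the first listing note, running the generated script from code (instead of SSMS) means handling the GO separators yourself, since GO is a client-side batch separator and not T-SQL. A minimal sketch, assuming the accumulated script is in a string named script and a valid connectionString (both names are placeholders):
// requires using System; using System.Data.SqlClient;
using (var connection = new SqlConnection(connectionString))
{
    connection.Open();
    // split on the GO separators appended above and run each batch separately
    var batches = script.Split(new[] { "\r\nGO\r\n" }, StringSplitOptions.RemoveEmptyEntries);
    foreach (var batch in batches)
    {
        using (var command = new SqlCommand(batch, connection))
            command.ExecuteNonQuery();
    }
}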
Related
I'm writing a plugin for AutoCAD and want to import all the blocks it will use at the beginning to make sure that they are available when needed. To do that, I use this method
public static void ImportBlocks(string[] filesToTryToImport, string filter = "")
{
foreach (string blockToImport in filesToTryToImport)
{
if (blockToImport.Contains(filter))
{
Database sourceDb = new Database(false, true); //Temporary database to hold data for block we want to import
try
{
sourceDb.ReadDwgFile(blockToImport, System.IO.FileShare.Read, true, ""); //Read the DWG into a side database
ObjectIdCollection blockIds = new ObjectIdCollection(); // Create a variable to store the list of block identifiers
Autodesk.AutoCAD.DatabaseServices.TransactionManager tm = sourceDb.TransactionManager;
using (Transaction myT = tm.StartTransaction())
{
// Open the block table
BlockTable bt = (BlockTable)tm.GetObject(sourceDb.BlockTableId, OpenMode.ForRead, false);
// Check each block in the block table
foreach (ObjectId btrId in bt)
{
BlockTableRecord btr = (BlockTableRecord)tm.GetObject(btrId, OpenMode.ForRead, false);
// Only add named & non-layout blocks to the copy list
if (!btr.IsAnonymous && !btr.IsLayout)
{
blockIds.Add(btrId);
}
btr.Dispose();
}
}
// Copy blocks from source to destination database
IdMapping mapping = new IdMapping();
sourceDb.WblockCloneObjects(blockIds, _database.BlockTableId, mapping, DuplicateRecordCloning.Replace, false);
_editor.WriteMessage("\nCopied " + blockIds.Count.ToString() + " block definitions from " + blockToImport + " to the current drawing.");
}
catch (Autodesk.AutoCAD.Runtime.Exception ex)
{
_editor.WriteMessage("\nError during copy: " + ex.Message);
}
finally
{
sourceDb.Dispose();
}
}
}
}
That method appears to work, in that it executes successfully. However, when I go to insert a block into the drawing via AutoCAD's interface, it doesn't show up as an option, and when I try to insert it programmatically, it throws a FileNotFound exception, meaning it didn't work. What's wrong with this method? Thanks in advance!
EDIT: Here is a less complicated method with a test method
public static void ImportSingleBlock(string fileToTryToImport)
{
using (Transaction tr = _database.TransactionManager.StartTransaction())
{
Database sourceDb = new Database(false, true); //Temporary database to hold data for block we want to import
try
{
sourceDb.ReadDwgFile(fileToTryToImport, System.IO.FileShare.Read, true, ""); //Read the DWG into a side database
_database.Insert(fileToTryToImport, sourceDb, false);
_editor.WriteMessage("\nSUCCESS: " + fileToTryToImport);
}
catch (Autodesk.AutoCAD.Runtime.Exception ex)
{
_editor.WriteMessage("\nERROR: " + fileToTryToImport);
}
finally
{
sourceDb.Dispose();
}
tr.Commit();
}
}
[CommandMethod("TESTSINGLEBLOCKIMPORTING")]
public void TestSingleBlockImporting()
{
OpenFileDialog ofd = new OpenFileDialog();
DialogResult result = ofd.ShowDialog();
if (result == DialogResult.Cancel) //Ending method on cancel
{
return;
}
string fileToTryToImport = ofd.FileName;
using (Transaction tr = _database.TransactionManager.StartTransaction())
{
EntityMethods.ImportSingleBlock(fileToTryToImport);
tr.Commit();
}
}
This file is the block I'm trying to import. Hope this inspires someone, because I am desperately lost right now.
Your code is correct and should work. In fact, I tried it and it works fine. You're probably forgetting to Commit() an outer transaction (the one in which you call this ImportBlocks() method). Check:
using (Transaction trans = _database.TransactionManager.StartTransaction())
{
ImportBlocks(... parameters here ...);
trans.Commit(); // remember to call this commit, if omitted, Abort() is assumed
}
I had the same issue, with very similar code. The issue was that
_database.Insert(fileToTryToImport, sourceDb, false);
should be
_database.Insert(blockName, sourceDb, false);
You can see that the first parameter has to be the block name, not the file path.
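If the DWG file name matches the block name you want, one way to derive it is Path.GetFileNameWithoutExtension; a hedged sketch, assuming _database and sourceDb as in the question's code:
// requires using System.IO;
string blockName = Path.GetFileNameWithoutExtension(fileToTryToImport);
_database.Insert(blockName, sourceDb, false); // the block definition is created under blockName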
I am trying to set up a test platform for a live application upgrade. In order not to mess up my live data, I duplicated the database to another database and updated my Crystal Reports using "Set Datasource Location" to point to the new DB. However, they are still loading records from the live DB! FYI, I am connecting using ODBC, and I have created a test ODBC data source for the test platform.
Previously my data connection was "osso_odbc", but I have updated it to "ossotest"; it is still loading records from "osso_odbc".
Does anybody know what went wrong and what I should do?
Thanks.
In the end, what I did was remove all the existing connections and re-create all the form elements. Now it is working.
1 - If "save data in the report" is checked, old data can still be prompt
2 - If you have subreports you must "set datasource location" for subreports, too
3 - "Set datasource Location" is not sointuitive, did you use righr procedure
4 - We are talking about changing datasource in design time, not in asp.net runitme, are we?
I use the following code to programmatically change the connection based on a connection string, and to pass parameters to the report based on the query string.
bool Logon(ReportDocument cr, string server, string db, bool integratedSecurity, string id, string pass)
{
ConnectionInfo ci = new ConnectionInfo();
SubreportObject subObj;
ci.ServerName = server;
ci.DatabaseName = db;
if (integratedSecurity)
ci.IntegratedSecurity = true;
else
{
ci.IntegratedSecurity = false;
ci.UserID = id;
ci.Password = pass;
}
PassParameters();
if (!ApplyLogon(cr, ci))
return false;
for (int i = 0; i < cr.ReportDefinition.ReportObjects.Count; i++)
if (cr.ReportDefinition.ReportObjects[i].Kind == ReportObjectKind.SubreportObject)
{
subObj = (SubreportObject)cr.ReportDefinition.ReportObjects[i];
if (!ApplyLogon(cr.OpenSubreport(subObj.SubreportName), ci))
return false;
}
return true;
}
bool ApplyLogon(ReportDocument cr, ConnectionInfo ci)
{
TableLogOnInfo li;
// for each table apply connection info
for (int i = 0; i < cr.Database.Tables.Count; i++)
{
li = cr.Database.Tables[i].LogOnInfo;
li.ConnectionInfo = ci;
cr.Database.Tables[i].ApplyLogOnInfo(li);
// check if logon was successful
// if TestConnectivity returns false, check
// logon credentials
if (TestConnectivity(cr, i))
{
// drop fully qualified table location
if (cr.Database.Tables[i].Location.IndexOf(".") > 0)
cr.Database.Tables[i].Location = cr.Database.Tables[i].Location.Substring(cr.Database.Tables[i].Location.LastIndexOf(".") + 1);
}
else return false;
}
return true;
}
private static bool TestConnectivity(ReportDocument cr, int i)
{
    // TestConnectivity occasionally throws a COMException on the first call(s),
    // so retry a few times before letting the exception propagate
    for (int attempt = 0; attempt < 3; attempt++)
    {
        try { return cr.Database.Tables[i].TestConnectivity(); }
        catch (System.Runtime.InteropServices.COMException) { /* retry */ }
    }
    return cr.Database.Tables[i].TestConnectivity(); // last attempt: let it throw
}
private void PassParameters()
{
// char[] subReportPrefix = new char[] { 'P', 'm', '-', '?' };
foreach (ParameterField field in reportDocument.ParameterFields)
{
string fieldName = field.Name.TrimStart('#');//.TrimStart(subReportPrefix);
ParameterValues p = new ParameterValues();
string valu = clearQueryString[fieldName]; // look it up in the query string
if (!string.IsNullOrEmpty(valu)) // found in the query string: OK
{
string[] valus = valu.Split('§'); // if the separator is present these are multiple values, otherwise a single value
if (valus.Length > 1)
{
foreach (string v in valus)
if (p.Count == 0 || !string.IsNullOrEmpty(v))
p.AddValue(v);
}
else
p.AddValue(valu);
field.CurrentValues = p;
}
......
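For completeness, a hypothetical way to call the Logon method above; the report path, server name, and credentials are placeholders, and the snippet assumes the reportDocument and query-string fields used by PassParameters are set up elsewhere:
ReportDocument rpt = new ReportDocument();
rpt.Load(@"C:\Reports\MyReport.rpt"); // placeholder path
if (Logon(rpt, "TESTSERVER", "ossotest_db", false, "user", "password"))
{
    crystalReportViewer1.ReportSource = rpt; // bind the report to a viewer control
}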
I have the following code to insert a record (I will be inserting thousands). I get a successful insert, but all of the numeric fields are EMPTY in Salesforce. The one text field works fine ("Church of Randy") and appears in Salesforce. Any ideas what I could be doing wrong?
public void InsertTestRecord()
{
ChurchHist__c ch = new ChurchHist__c();
ch.ChurchId__c = 9999;
ch.ChurchName__c = "Church of Randy";
ch.ReportYear__c = 2013;
ch.ReportMonth__c = 08;
ch.RegisteredMembers__c = 777;
ch.ActiveMembers__c = 111;
ch.InactiveMembers__c = 666;
ch.UsersForMonth__c = 25;
ch.ContribForMonth__c = 789.01;
ch.ContribYTD__c = 200000.02;
ch.AchContrib__c = 200000.00;
ch.CcContrib__c = 0.02;
ch.AchToCc__c = 0.12345;
sObject[] s = new sObject[1];
s[0] = ch;
try
{
SaveResult[] saveResult = binding.create(s);
if (saveResult[0].success)
{
Debug.Print("Success");
}
else
{
foreach (Error error in saveResult[0].errors)
{
Debug.Print(error.statusCode.ToString());
Debug.Print(error.message);
}
}
}
catch (SoapException se)
{
Debug.Print(se.ToString());
}
}
You need to set the Specified flags: each numeric property has an associated Specified flag that controls whether the .NET SOAP engine will actually send that property over the wire. Unfortunately, the setter for the property does not automatically set the Specified flag. E.g.:
ch.ChurchId__c = 9999;
ch.ChurchId__cSpecified = true;
ch.ReportYear__c = 2013;
ch.ReportYear__cSpecified = true;
The Specified flags are a "feature" of the .NET SOAP system for certain types (numbers and dates, IIRC).
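If you set many fields, a small reflection helper can keep the value and its Specified flag in sync. This is only a sketch, under the assumption that the generated proxy exposes public properties named <Field> and <Field>Specified (check your generated classes first):
// requires using System.Reflection;
static void SetWithSpecified(object record, string field, object value)
{
    System.Type type = record.GetType();
    type.GetProperty(field).SetValue(record, value, null);
    // flip the matching *Specified flag so the SOAP serializer sends the value
    PropertyInfo spec = type.GetProperty(field + "Specified");
    if (spec != null)
        spec.SetValue(record, true, null);
}
// usage (the value's type must match the property, e.g. double):
// SetWithSpecified(ch, "ReportYear__c", 2013.0);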
I have a database that may be on the network drive.
There are two things that I want to achieve:
1. When the first user connects to it in read-only mode (he doesn't have read-write access to the location, or the database is read-only), other users must use a read-only connection also (even if they have RW access).
2. When the first user connects to it in RW mode, others cannot connect to the database at all.
I'm using SQLite, and the concurrency should not be the problem, as the database should never be used by more than 10 people at the same time.
UPDATE: This is a sample that I'm trying to make work, so I could implement it in the program itself. Almost everything can be changed.
UPDATE: Now that I finally understood what @CL. was telling me, I made it work, and this is the updated code.
using System.Data;
using System.Data.SQLite;
using System.Diagnostics;
using System.Linq;
using System.IO;
using DbSample.Domain;
using DbSample.Infrastructure;
using NHibernate.Linq;
using NHibernate.Util;
namespace DbSample.Console
{
class Program
{
static void Main(string[] args)
{
IDatabaseContext databaseContext = null;
databaseContext = new SqliteDatabaseContext(args[1]);
var connection = LockDbNew(args[1]);
if (connection == null) return;
var sessionFactory = databaseContext.CreateSessionFactory();
if (sessionFactory != null)
{
int insertCount = 0;
int deleteCount = 0;
while (true)
{
try
{
using (var session = sessionFactory.OpenSession(connection))
{
session.FlushMode = NHibernate.FlushMode.Never;
var command = session.Connection.CreateCommand();
command.CommandText = "PRAGMA locking_mode=EXCLUSIVE";
command.ExecuteNonQuery();
using (var transaction = session.BeginTransaction(IsolationLevel.ReadCommitted))
{
bool update = false;
bool delete = false;
bool read = false;
bool readall = false;
int op = 0;
System.Console.Write("\nMenu of the day:\n1: update\n2: delete\n3: read\n4: read all\n0: EXIT\n\nYour choice: ");
op = System.Convert.ToInt32(System.Console.ReadLine());
if (op == 1)
update = true;
else if (op == 2)
delete = true;
else if (op == 3)
read = true;
else if (op == 4)
readall = true;
else if (op == 0)
break;
else System.Console.WriteLine("Invalid choice, please pick one of the options above.");
if (delete)
{
System.Console.Write("Enter the ID of the object to delete: ");
var objectToRemove = session.Get<MyObject>(System.Convert.ToInt32(System.Console.ReadLine()));
if (!(objectToRemove == null))
{
session.Delete(objectToRemove);
System.Console.WriteLine("Deleted {0}, ID: {1}", objectToRemove.MyName, objectToRemove.Id);
deleteCount++;
}
else
System.Console.WriteLine("\nObject not present in the database!\n");
}
else if (update)
{
System.Console.Write("How many objects to add/update? ");
int number = System.Convert.ToInt32(System.Console.ReadLine());
number += insertCount;
for (; insertCount < number; insertCount++)
{
var myObject = session.Get<MyObject>(insertCount + 1);
if (myObject == null)
{
myObject = new MyObject
{
MtName = "Object" + insertCount,
IdLegacy = 0,
};
session.Save(myObject);
System.Console.WriteLine("Added {0}, ID: {1}", myObject.MyName, myObject.Id);
}
else
{
session.Update(myObject);
System.Console.WriteLine("Updated {0}, ID: {1}", myObject.MyName, myObject.Id);
}
}
}
else if (read)
{
System.Console.Write("Enter the ID of the object to read: ");
var objectToRead = session.Get<MyObject>(System.Convert.ToInt32(System.Console.ReadLine()));
if (!(objectToRead == null))
System.Console.WriteLine("Got {0}, ID: {1}", objectToRead.MyName, objectToRead.Id);
else
System.Console.WriteLine("\nObject not present in the database!\n");
}
else if (readall)
{
System.Console.Write("How many objects to read? ");
int number = System.Convert.ToInt32(System.Console.ReadLine());
for (int i = 0; i < number; i++)
{
var objectToRead = session.Get<MyObject>(i + 1);
if (!(objectToRead == null))
System.Console.WriteLine("Got {0}, ID: {1}", objectToRead.MyName, objectToRead.Id);
else
System.Console.WriteLine("\nObject not present in the database! ID: {0}\n", i + 1);
}
}
update = false;
delete = false;
read = false;
readall = false;
transaction.Commit();
}
}
}
catch (System.Exception)
{
throw; // rethrow without resetting the stack trace
}
}
sessionFactory.Close();
}
}
private static bool readOnly; // set when only a read-only (SHARED) lock could be acquired
private static SQLiteConnection LockDbNew(string database)
{
var fi = new FileInfo(database);
if (!fi.Exists)
return null;
var builder = new SQLiteConnectionStringBuilder { DefaultTimeout = 1, DataSource = fi.FullName, Version = 3 };
var connectionStr = builder.ToString();
var connection = new SQLiteConnection(connectionStr) { DefaultTimeout = 1 };
var cmd = new SQLiteCommand(connection);
connection.Open();
// try to get an exclusive lock on the database
try
{
cmd.CommandText = "PRAGMA locking_mode = EXCLUSIVE; BEGIN EXCLUSIVE; COMMIT;";
cmd.ExecuteNonQuery();
}
// if we can't get the exclusive lock, it could mean 3 things
// 1: someone else has locked the database
// 2: we don't have write access to the database location
// 3: the database itself is a read-only file
// So, we try to connect as read-only
catch (Exception)
{
// we try to set the SHARED lock
try
{
// first we clear the locks
cmd.CommandText = "PRAGMA locking_mode = NORMAL";
cmd.ExecuteNonQuery();
cmd.CommandText = "SELECT COUNT(*) FROM MyObject";
cmd.ExecuteNonQuery();
// then hold a SHARED lock: in EXCLUSIVE locking mode the shared lock taken by the next read is kept
cmd.CommandText = "PRAGMA locking_mode = EXCLUSIVE";
cmd.ExecuteNonQuery();
cmd.CommandText = "SELECT COUNT(*) FROM MyObject";
cmd.ExecuteNonQuery();
readOnly = true;
}
catch (Exception)
{
// if we can't set EXCLUSIVE nor SHARED lock, someone else has opened the DB in read-write mode and we can't connect at all
connection.Close();
return null;
}
}
return connection;
}
}
}
Set PRAGMA locking_mode=EXCLUSIVE to prevent SQLite from releasing its locks after a transaction ends.
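A minimal ADO.NET illustration of that (the file name is a placeholder): after setting locking_mode=EXCLUSIVE, the lock taken by the next transaction is held until the connection closes, so other connections are shut out.
// requires using System.Data.SQLite;
using (var connection = new SQLiteConnection("Data Source=shared.db;Version=3;"))
{
    connection.Open();
    using (var cmd = connection.CreateCommand())
    {
        cmd.CommandText = "PRAGMA locking_mode = EXCLUSIVE";
        cmd.ExecuteNonQuery();
        // the exclusive lock is actually acquired by the first write transaction...
        cmd.CommandText = "BEGIN EXCLUSIVE; COMMIT;";
        cmd.ExecuteNonQuery();
        // ...and from here on it is held until the connection is closed
    }
}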
I don't know if it can be done within the DB, but in the application:
You can set a global variable (not sure if it's a web or desktop app) to track whether anyone is connected and whether they have write access.
After that, you can check the other clients' state.
I need to back up a database (using SQL Server 2008 R2). The size of the DB is about 100 GB, so I want to back up the content of only the important tables (those containing settings) and, of course, the definitions of all tables, views, triggers, etc.
For example:
db: Products
tables: Food, Clothes, Cars
There are too many cars in Cars, so I will back up only its table definition (CREATE TABLE ...), but Food and Clothes completely (including their content).
Please advise me on the best solution. I will probably use SMO (if there is no better solution). Should I use the Backup class? Or the Scripter class? Or another one (if there is any)? Which class can handle my requirements?
I want to back up to *.sql files, one per table if possible.
I would appreciate a code sample, written in the answer or posted somewhere (post the URL), but make sure the external article addresses exactly this kind of problem.
You can use this piece of code:
ServerConnection connection = new ServerConnection("SERVER,1234", "User", "User1234");
Server server = new Server(connection);
Database database = server.Databases["DbToBackup"];
This uses SMO; you will have to play with the options you need:
StringBuilder sb = new StringBuilder();
using (SqlConnection connection = new SqlConnection("connectionString")) {
ServerConnection serverConnection = new ServerConnection(connection);
Server server = new Server(serverConnection);
Database database = server.Databases["databaseName"];
Scripter scripter = new Scripter(server);
scripter.Options.ScriptDrops = false;
scripter.Options.WithDependencies = true;
scripter.Options.ScriptData = true;
Urn[] smoObjects = new Urn[1];
foreach (Table table in database.Tables) {
smoObjects[0] = table.Urn;
if (!table.IsSystemObject) {
foreach (string s in scripter.EnumScript(smoObjects)) {
System.Diagnostics.Debug.WriteLine(s);
sb.AppendLine(s);
}
}
}
}
// Write to *.sql file on disk
File.WriteAllText(@".\backup.sql", sb.ToString());
Another easy way to do this is by backing up the database to XML files. To do this, use a DataTable and call WriteXml and WriteXmlSchema (you need the schema later on so the data can be imported/restored using the same method). This method means you are backing up per table.
private bool BackupTable(string connectionString, string tableName, string directory) {
using (SqlConnection connection = new SqlConnection(connectionString)) {
try {
connection.Open();
}
catch (System.Data.SqlClient.SqlException ex) {
// Handle
return false;
}
using (SqlDataAdapter adapter = new SqlDataAdapter(string.Format("SELECT * FROM {0}", tableName), connection)) {
using (DataTable table = new DataTable(tableName)) {
adapter.Fill(table);
try {
table.WriteXml(Path.Combine(directory, string.Format("{0}.xml", tableName)));
table.WriteXmlSchema(Path.Combine(directory, string.Format("{0}.xsd", tableName)));
}
catch (System.UnauthorizedAccessException ex) {
// Handle
return false;
}
}
}
}
return true;
}
You can later push these back into a database by using ReadXmlSchema and ReadXml, then using an adapter to fill and update the table in the database. I assume you are knowledgeable in basic CRUD, so I shouldn't need to cover that part.
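For reference, here is a hedged sketch of that restore path (the method name and file layout are my own, mirroring BackupTable above): it reads the schema and data back into a DataTable and lets a command builder generate the INSERTs. Rows loaded by ReadXml are in the Added state, so Update pushes them all as inserts.
private static void RestoreTable(string connectionString, string tableName, string directory) {
    using (SqlConnection connection = new SqlConnection(connectionString))
    using (SqlDataAdapter adapter = new SqlDataAdapter(string.Format("SELECT * FROM {0}", tableName), connection))
    using (SqlCommandBuilder builder = new SqlCommandBuilder(adapter))
    using (DataTable table = new DataTable(tableName)) {
        // read the schema first so column types match the XML data
        table.ReadXmlSchema(Path.Combine(directory, string.Format("{0}.xsd", tableName)));
        table.ReadXml(Path.Combine(directory, string.Format("{0}.xml", tableName)));
        adapter.Update(table); // the command builder supplies the INSERT command
    }
}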
If you want to use SMO, here is an MSDN article on using the Backup and Restore classes to back up and restore a database. The code sample is unformatted, and in VB.NET, but easily translatable.
http://msdn.microsoft.com/en-us/library/ms162133(v=SQL.100).aspx
Lastly, and this may be easier: talk to the IT guys and see if they will let you remote in or give you access to do the backup yourself. If you are writing the software and this is a crucial step, speak up and let them know how important it is for you to do this, as it will save the cost of writing a custom tool when great tools already exist. Especially since the database is 100 GB, you can use the tools you already know work.
This article was informative enough to solve my problem. Here is my working solution.
I decided to script all objects to one file; I think that's the better solution because of dependencies. If there were one file per table and some dependencies between tables (foreign keys, for example), it would script more code than if everything were in one file.
I omitted some parts of the code in this sample, like archiving previous backup files in case a backup goes wrong. Without such a system, all backups would be scripted into one file and it would get messy.
public class DatabaseBackup
{
private ServerConnection Connection;
private Server Server;
private Database Database;
private ScriptingOptions Options;
private string FileName;
private const string NoDataScript = "Cars"; // names of tables to script without their data
public DatabaseBackup(string server, string login, string password, string database)
{
Connection = new ServerConnection(server, login, password);
Server = new Server(Connection);
Database = Server.Databases[database];
}
public void Backup(string fileName)
{
FileName = fileName;
SetupOptions();
foreach (Table table in Database.Tables)
{
if (!table.IsSystemObject)
{
if (NoDataScript.Contains(table.Name))
{
Options.ScriptData = false;
table.EnumScript(Options);
Options.ScriptData = true;
}
else
table.EnumScript(Options);
}
}
}
private void SetupOptions()
{
Options = new ScriptingOptions();
Options.ScriptSchema = true;
Options.ScriptData = true;
Options.ScriptDrops = false;
Options.WithDependencies = true;
Options.Indexes = true;
Options.FileName = FileName;
Options.EnforceScriptingOptions = true;
Options.IncludeHeaders = true;
Options.AppendToFile = true;
}
}
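A hypothetical call to the class above; the server name and credentials are placeholders:
var backup = new DatabaseBackup(@"SERVER\INSTANCE", "login", "password", "Products");
backup.Backup(@"C:\Backups\Products_" + DateTime.Today.ToString("yyyyMMdd") + ".sql");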
Server databaseServer = new Server("ecrisqlstddev"); // database server name
string strFileName = @"C:\Images\UltimateSurveyMod_" + DateTime.Today.ToString("yyyyMMdd") + ".sql"; //20120720
if (System.IO.File.Exists(strFileName))
System.IO.File.Delete(strFileName);
List<SqlSmoObject> list = new List<SqlSmoObject>();
Scripter scripter = new Scripter(databaseServer);
Database dbUltimateSurvey = databaseServer.Databases["UltimateSurvey"];//DataBase Name
// Table scripting Writing
DataTable dataTable1 = dbUltimateSurvey.EnumObjects(DatabaseObjectTypes.Table);
foreach (DataRow drTable in dataTable1.Rows)
{
// string strTableSchema = (string)drTable["Schema"];
// if (strTableSchema == "dbo")
// continue;
Table dbTable = (Table)databaseServer.GetSmoObject(new Urn((string)drTable["Urn"]));
if (!dbTable.IsSystemObject)
if (dbTable.Name.Contains("SASTool_"))
list.Add(dbTable);
}
scripter.Server = databaseServer;
scripter.Options.IncludeHeaders = true;
scripter.Options.SchemaQualify = true;
scripter.Options.ToFileOnly = true;
scripter.Options.FileName = strFileName;
scripter.Options.DriAll = true;
scripter.Options.AppendToFile = true;
scripter.Script(list.ToArray()); // Table Script completed
// Stored procedures scripting writing
list = new List<SqlSmoObject>();
DataTable dataTable = dbUltimateSurvey.EnumObjects(DatabaseObjectTypes.StoredProcedure);
foreach (DataRow row in dataTable.Rows)
{
string sSchema = (string)row["Schema"];
if (sSchema == "sys" || sSchema == "INFORMATION_SCHEMA")
continue;
StoredProcedure sp = (StoredProcedure)databaseServer.GetSmoObject(
new Urn((string)row["Urn"]));
if (!sp.IsSystemObject)
if (sp.Name.Contains("custom_"))
list.Add(sp);
}
scripter.Server = databaseServer;
scripter.Options.IncludeHeaders = true;
scripter.Options.SchemaQualify = true;
scripter.Options.ToFileOnly = true;
scripter.Options.FileName = strFileName;
scripter.Options.DriAll = true;
scripter.Options.AppendToFile = true;
scripter.Script(list.ToArray()); // Stored procedures script completed
What you describe is not really a backup, but I understand what your goal is:
Scripter sample code
Using SMO to get create script for table defaults
http://blogs.msdn.com/b/mwories/archive/2005/05/07/basic-scripting.aspx
http://msdn.microsoft.com/en-us/library/microsoft.sqlserver.management.smo.scripter.aspx
http://weblogs.asp.net/shahar/archive/2010/03/03/generating-sql-backup-script-for-tables-amp-data-from-any-net-application-using-smo.aspx
http://en.csharp-online.net/SQL_Server_Management_Objects
http://www.mssqltips.com/sqlservertip/1833/generate-scripts-for-database-objects-with-smo-for-sql-server/
For "Backup" of Data you could load the table content via a Reader into a DataTable and store the result as XML...