I need to back up a database (SQL Server 2008 R2). The size of the db is about 100 GB, so I want to back up the content of only the important tables (those containing settings) and, of course, the object definitions of all tables, views, triggers etc.
For example:
db: Products
tables: Food, Clothes, Cars
There are too many cars in Cars, so I will back up only its table definition (CREATE TABLE ...), and back up Food and Clothes completely (including their content).
Advise me the best solution, please. I will probably use SMO (if no better solution). Should I use Backup class? Or Scripter class? Or another (if there is any)? Which class can handle my requirements?
I want backup these files to *.sql files, one per table if possible.
I would appreciate code sample. Written in answer or somewhere (post url), but be sure that external article has solution exactly for this kind of problem.
You can use this part of code
// Open an SMO connection to the instance and grab the database to back up.
var smoConnection = new ServerConnection("SERVER,1234", "User", "User1234");
var smoServer = new Server(smoConnection);
var targetDatabase = smoServer.Databases["DbToBackup"];
Using SMO. You will have to play with the options you need.
// Accumulates the generated T-SQL for every non-system table.
StringBuilder sb = new StringBuilder();
using (SqlConnection connection = new SqlConnection("connectionString")) {
    ServerConnection serverConnection = new ServerConnection(connection);
    Server server = new Server(serverConnection);
    Database database = server.Databases["databaseName"];
    Scripter scripter = new Scripter(server);
    scripter.Options.ScriptDrops = false;      // emit CREATE, not DROP
    scripter.Options.WithDependencies = true;  // script dependent objects too
    scripter.Options.ScriptData = true;        // include INSERT statements
    Urn[] smoObjects = new Urn[1];
    foreach (Table table in database.Tables) {
        smoObjects[0] = table.Urn;
        if (!table.IsSystemObject) {
            foreach (string s in scripter.EnumScript(smoObjects)) {
                System.Diagnostics.Debug.WriteLine(s);
                sb.AppendLine(s);
            }
        }
    }
}
// Write the collected script to a *.sql file on disk.
// FIX: the verbatim-string prefix is '@' in C# ('#' does not compile), and
// File.WriteAllText requires the contents argument, which the original omitted.
File.WriteAllText(@".\backup.sql", sb.ToString());
Another easy way to do this is by backing the database to xml files. To do this use a DataTable and call WriteXml and WriteXmlSchema. (You need the schema later on so it can be imported/restored using the same method). This method means you are backing up per table.
/// <summary>
/// Dumps one table's rows to "{tableName}.xml" and its schema to
/// "{tableName}.xsd" in <paramref name="directory"/>, so it can later be
/// restored with DataTable.ReadXmlSchema/ReadXml.
/// </summary>
/// <param name="connectionString">SQL Server connection string.</param>
/// <param name="tableName">Table to export (also used as the file base name).</param>
/// <param name="directory">Destination directory for the .xml/.xsd pair.</param>
/// <returns>false if the connection or the file writes fail; true otherwise.</returns>
private bool BackupTable(string connectionString, string tableName, string directory) {
    using (SqlConnection connection = new SqlConnection(connectionString)) {
        try {
            connection.Open();
        }
        // FIX: the unused 'ex' local produced compiler warning CS0168.
        catch (System.Data.SqlClient.SqlException) {
            // Handle
            return false;
        }
        // WARNING: tableName is concatenated into the SQL text (identifiers
        // cannot be parameterized) — only call this with trusted table names.
        using (SqlDataAdapter adapter = new SqlDataAdapter(string.Format("SELECT * FROM {0}", tableName), connection)) {
            using (DataTable table = new DataTable(tableName)) {
                adapter.Fill(table);
                try {
                    table.WriteXml(Path.Combine(directory, string.Format("{0}.xml", tableName)));
                    table.WriteXmlSchema(Path.Combine(directory, string.Format("{0}.xsd", tableName)));
                }
                // FIX: 'ex' was unused here as well (CS0168).
                catch (System.UnauthorizedAccessException) {
                    // Handle
                    return false;
                }
            }
        }
    }
    return true;
}
You can later on then push these back into a database us by using ReadXmlSchema and ReadXml, using an adapter to fill and update the table to the database. I assume you are knowledgable in basic CRUD so I shouldn't need to cover that part.
If you want to use SMO, here is an MSDN article on using the Backup and Restore classes to back up and restore a database. The code sample is unformatted, and in VB.NET, but easily translatable.
http://msdn.microsoft.com/en-us/library/ms162133(v=SQL.100).aspx
Lastly, which may be easier, talk to the IT guys and see if they will let you remote in or give you access to do the backup yourself. If you are writing the software and this is a crucial step, speak up and let them know how important it is for you to do this, as this will reduce cost in you having to write a custom tool when great tools already exist. Especially since the database is 100GB, you can use the tools you know already work.
This article was informative enough to solve my problem. Here is my working solution.
I decided to script all objects to one file; it's the better solution because of dependencies, I think. If there were one table per file and there were also some dependencies (foreign keys, for example), it would script more code than if everything is in one file.
I omitted some parts of the code in this sample, such as backing up previous backup files in case a database backup goes wrong. Without such a system, all backups would be scripted into one file and it would get messy.
/// <summary>
/// Scripts an entire database (schema + data) to a single .sql file via SMO,
/// skipping the data of tables listed in <see cref="NoDataTables"/>.
/// </summary>
public class DatabaseBackup
{
    private ServerConnection Connection;
    private Server Server;
    private Database Database;
    private ScriptingOptions Options;
    private string FileName;
    // FIX: this was a single const string "Cars" tested with string.Contains,
    // which performs SUBSTRING matching — any table whose name is a substring
    // of "Cars" (e.g. "Car", "a") would also be scripted without data.
    // A set compared by exact name expresses the intent correctly.
    private static readonly HashSet<string> NoDataTables = new HashSet<string> { "Cars" };

    /// <summary>Connects to the given server and selects the database to script.</summary>
    public DatabaseBackup(string server, string login, string password, string database)
    {
        Connection = new ServerConnection(server, login, password);
        Server = new Server(Connection);
        Database = Server.Databases[database];
    }

    /// <summary>
    /// Appends CREATE scripts (and data, unless excluded) for every
    /// non-system table to <paramref name="fileName"/>.
    /// </summary>
    public void Backup(string fileName)
    {
        FileName = fileName;
        SetupOptions();
        foreach (Table table in Database.Tables)
        {
            if (!table.IsSystemObject)
            {
                if (NoDataTables.Contains(table.Name))
                {
                    // Schema only for excluded tables; restore data scripting after.
                    Options.ScriptData = false;
                    table.EnumScript(Options);
                    Options.ScriptData = true;
                }
                else
                    table.EnumScript(Options);
            }
        }
    }

    // Configures one shared ScriptingOptions instance: schema + data,
    // dependencies and indexes included, appended to a single output file.
    private void SetupOptions()
    {
        Options = new ScriptingOptions();
        Options.ScriptSchema = true;
        Options.ScriptData = true;
        Options.ScriptDrops = false;
        Options.WithDependencies = true;
        Options.Indexes = true;
        Options.FileName = FileName;
        Options.EnforceScriptingOptions = true;
        Options.IncludeHeaders = true;
        Options.AppendToFile = true;
    }
}
// Target server and database.
Server databaseServer = new Server("ecrisqlstddev");
// FIX: the verbatim-string literal prefix is '@' in C# — '#' does not compile.
string strFileName = @"C:\Images\UltimateSurveyMod_" + DateTime.Today.ToString("yyyyMMdd") + ".sql"; //20120720
if (System.IO.File.Exists(strFileName))
    System.IO.File.Delete(strFileName);
List<SqlSmoObject> list = new List<SqlSmoObject>();
Scripter scripter = new Scripter(databaseServer);
Database dbUltimateSurvey = databaseServer.Databases["UltimateSurvey"];//DataBase Name
// Configure the scripter once; both passes below append to the same file.
// (The original assigned these identical option values twice.)
scripter.Server = databaseServer;
scripter.Options.IncludeHeaders = true;
scripter.Options.SchemaQualify = true;
scripter.Options.ToFileOnly = true;
scripter.Options.FileName = strFileName;
scripter.Options.DriAll = true;
scripter.Options.AppendToFile = true;
// Table scripting: collect every non-system table whose name contains "SASTool_".
DataTable dataTable1 = dbUltimateSurvey.EnumObjects(DatabaseObjectTypes.Table);
foreach (DataRow drTable in dataTable1.Rows)
{
    Table dbTable = (Table)databaseServer.GetSmoObject(new Urn((string)drTable["Urn"]));
    if (!dbTable.IsSystemObject)
        if (dbTable.Name.Contains("SASTool_"))
            list.Add(dbTable);
}
scripter.Script(list.ToArray()); // Table Script completed
// Stored-procedure scripting: same file, non-system procs named "custom_*".
list = new List<SqlSmoObject>();
DataTable dataTable = dbUltimateSurvey.EnumObjects(DatabaseObjectTypes.StoredProcedure);
foreach (DataRow row in dataTable.Rows)
{
    string sSchema = (string)row["Schema"];
    if (sSchema == "sys" || sSchema == "INFORMATION_SCHEMA")
        continue;
    StoredProcedure sp = (StoredProcedure)databaseServer.GetSmoObject(
        new Urn((string)row["Urn"]));
    if (!sp.IsSystemObject)
        if (sp.Name.Contains("custom_"))
            list.Add(sp);
}
scripter.Script(list.ToArray()); // Stored procedures script completed
What you describe is not really a Backup but I understand what your goal is:
Scripter sample code
Using SMO to get create script for table defaults
http://blogs.msdn.com/b/mwories/archive/2005/05/07/basic-scripting.aspx
http://msdn.microsoft.com/en-us/library/microsoft.sqlserver.management.smo.scripter.aspx
http://weblogs.asp.net/shahar/archive/2010/03/03/generating-sql-backup-script-for-tables-amp-data-from-any-net-application-using-smo.aspx
http://en.csharp-online.net/SQL_Server_Management_Objects
http://www.mssqltips.com/sqlservertip/1833/generate-scripts-for-database-objects-with-smo-for-sql-server/
For "Backup" of Data you could load the table content via a Reader into a DataTable and store the result as XML...
Related
I had my database and applications (web, api & service bus that all create crystal reports at runtime) on the same server. All worked perfect creating reports at runtime.
Moved the database to a different server, which only allow remote connections from the app server.
For my database connections, all I had to do in my projects was change the server in the connection string from (local) to the ip address of the database server, and it all works fine.
However it seems that to just change the server from (local) to the ip address for the crystal reports does not work (give a "Database logon failed" error)
I'm not sure if this is the problem, but for creating the report locally (before uploading the .rpt to the server) I had to create a connection where the server is set to "local" ( in the data source location). Since I cannot access the new database remotely from my local machine, I cannot change that)
The code I use look as follow:
// Read the database logon settings from configuration.
string server = ConfigurationManager.AppSettings["Server"];
string database = ConfigurationManager.AppSettings["Database"];
string user = ConfigurationManager.AppSettings["DatabaseUser"];
string password = ConfigurationManager.AppSettings["DatabasePassword"];
// Load the report and apply the logon before touching parameters.
var report = new ReportClass {FileName = reportPath};
report.Load();
report.SetDatabaseLogon(user, password, server, database);
var parameterValue = new ParameterDiscreteValue {Value = item.Id};
var parameter = new ParameterValues {parameterValue};
// FIX: the parameter key was mangled to "#id" by the same '@'→'#' corruption
// seen elsewhere in this post; SQL-bound Crystal parameters are named "@id".
report.DataDefinition.ParameterFields["@id"].ApplyCurrentValues(parameter);
report.ExportToDisk(ExportFormatType.PortableDocFormat, path);
Thank you, I ended up with this that worked (using sql)
Essentially the idea is setting the logon info not just for the report, but also for all subreports. Would have been nice if one could just do a "foreach subreport in report.." ,but seems not to work that way
/// <summary>
/// Applies the configured database logon to every table of the report and of
/// every subreport it contains (subreports do not inherit the main logon).
/// </summary>
public static void SetConnections(ReportDocument report)
{
    // Shared logon details, read once from configuration.
    var connInfo = new ConnectionInfo
    {
        ServerName = ConfigurationManager.AppSettings["Server"],
        DatabaseName = ConfigurationManager.AppSettings["Database"],
        UserID = ConfigurationManager.AppSettings["DatabaseUser"],
        Password = ConfigurationManager.AppSettings["DatabasePassword"]
    };

    // Main report: push the logon into each table.
    Database database = report.Database;
    Tables tables = database.Tables;
    foreach (Table table in tables)
    {
        TableLogOnInfo logOnInfo = table.LogOnInfo;
        logOnInfo.ConnectionInfo = connInfo;
        table.ApplyLogOnInfo(logOnInfo);
    }

    // Subreports: walk every section's report objects looking for them.
    foreach (Section section in report.ReportDefinition.Sections)
    {
        foreach (ReportObject reportObject in section.ReportObjects)
        {
            if (reportObject.Kind != ReportObjectKind.SubreportObject)
                continue;

            var subreport = (SubreportObject)reportObject;
            ReportDocument subDocument = subreport.OpenSubreport(subreport.SubreportName);
            foreach (Table subTable in subDocument.Database.Tables)
            {
                TableLogOnInfo subLogOn = subTable.LogOnInfo;
                subLogOn.ConnectionInfo = connInfo;
                subTable.ApplyLogOnInfo(subLogOn);
            }
        }
    }
}
I use Crystal report on oracle database, in my application i use this code:
// Re-points an already-loaded Crystal report at a different Oracle alias by
// rebuilding the query-engine (QE) property bag and replacing every existing
// connection. NOTE(review): DBController, DBAlias, DBUsername and DBPassword
// are defined elsewhere — their exact semantics are assumed from usage here.
CrystalDecisions.CrystalReports.Engine.ReportDocument rpt = new CrystalDecisions.CrystalReports.Engine.ReportDocument();
//reset connection
DBController.LogonEx(DBAlias, "", DBUsername, DBPassword);
//Create the QE (query engine) propertybag with the provider details and logon property bag
CrystalDecisions.ReportAppServer.DataDefModel.PropertyBag QE_Details = new CrystalDecisions.ReportAppServer.DataDefModel.PropertyBag();
// Use the first existing connection as the template to copy attributes from.
ConnectionInfo ci = DBController.GetConnectionInfos(null)[0];
//copy from existing attributes except for server name
for (int idx = 0; idx < ci.Attributes.PropertyIDs.Count; idx++)
{
    switch (ci.Attributes.PropertyIDs[idx])
    {
        case "QE_ServerDescription":
            // Server description gets the new alias instead of the old server.
            QE_Details.Add(ci.Attributes.PropertyIDs[idx], DBAlias);
            break;
        case "QE_LogonProperties":
            //this is itself a property bag
            CrystalDecisions.ReportAppServer.DataDefModel.PropertyBag logonDetails = new CrystalDecisions.ReportAppServer.DataDefModel.PropertyBag();
            PropertyBag OLDLogon = (PropertyBag)ci.Attributes[ci.Attributes.PropertyIDs[idx]];
            for (int idx2 = 0; idx2 < OLDLogon.PropertyIDs.Count; idx2++)
            {
                switch (OLDLogon.PropertyIDs[idx2])
                {
                    case "Server":
                    case "Data Source":
                        // Both keys point at the server; substitute the new alias.
                        logonDetails.Add(OLDLogon.PropertyIDs[idx2], DBAlias);
                        break;
                    default:
                        // Everything else is copied through unchanged.
                        logonDetails.Add(OLDLogon.PropertyIDs[idx2], OLDLogon[OLDLogon.PropertyIDs[idx2]]);
                        break;
                }
            }
            QE_Details.Add(ci.Attributes.PropertyIDs[idx], logonDetails);
            break;
        default:
            // Non-special attributes are copied verbatim.
            QE_Details.Add(ci.Attributes.PropertyIDs[idx], ci.Attributes[ci.Attributes.PropertyIDs[idx]]);
            break;
    }
}
//now replace all existing connections with new one
CrystalDecisions.ReportAppServer.DataDefModel.ConnectionInfo newConnInfo = new CrystalDecisions.ReportAppServer.DataDefModel.ConnectionInfo();
newConnInfo.Attributes = QE_Details;
newConnInfo.Kind = CrystalDecisions.ReportAppServer.DataDefModel.CrConnectionInfoKindEnum.crConnectionInfoKindCRQE;
newConnInfo.UserName = DBUsername;
newConnInfo.Password = DBPassword;
foreach (CrystalDecisions.ReportAppServer.DataDefModel.ConnectionInfo oldConnInfo in DBController.GetConnectionInfos(null))
{
    // crDBOptionDoNotVerifyDB: skip the round-trip database verification.
    DBController.ReplaceConnection(oldConnInfo, newConnInfo.Clone(true), null, CrystalDecisions.ReportAppServer.DataDefModel.CrDBOptionsEnum.crDBOptionDoNotVerifyDB);
}
I hope this helps.
Have a look at the code below.
Here is my _cSynchronization class, where the Sync functions are present.
(500) in connection string means timeout = 500
// Static helper that provisions and runs Microsoft Sync Framework scopes
// (one scope per table, named "<table>_SCOP") between a server and a client
// SQL database. NOTE(review): _CObjectsofClasses, _CPubVar and _CSyncDetails
// are project-local types not visible here — their behavior is assumed.
public static class _cSynchronization
{
    // Running count of applied change batches (updated by the ChangesApplied handler).
    public static int transactionCount;
    // Maximum transaction size handed to the destination provider.
    public static uint BatchSize = 10000;
    // Memory data cache size given to both sync providers.
    public static uint MemorySize = 20000;

    // Returns the names of all user tables to synchronize, excluding
    // bookkeeping tables, sync-metadata tables (scope_info, *_tracking, ...)
    // and a fixed list of application tables. Also drives the progress UI
    // counters in _CPubVar as it enumerates.
    public static List<string> _MGetAllTableList()
    {
        List<string> list = new List<string>();
        DataRowCollection _dr = _CObjectsofClasses._obj_CDatabase._MGetDataRows("Select TABLE_NAME From INFORMATION_SCHEMA.TABLES WHERE TABLE_NAME <> N'AUTOBACKUPSET' AND TABLE_NAME <> N'BINDATA' AND TABLE_NAME <> N'_ATTENDANCESTATUS' AND TABLE_NAME NOT like '%_tracking%' AND TABLE_TYPE ='BASE TABLE' AND TABLE_NAME <> N'schema_info' AND TABLE_NAME <> N'scope_info' AND TABLE_NAME <> N'scope_config' AND TABLE_NAME <> '_CLIENTNAME' AND TABLE_NAME <> '_TABSETTING' AND TABLE_NAME <> '_EMPLOYEEPAYMENT1' AND TABLE_NAME <> '_LOCALCOMPANYINFO' ORDER BY TABLE_NAME");
        int a = 0;      // NOTE(review): never used — candidate for removal
        string x = "";  // holds the current table name while enumerating
        if (_dr.Count > 0)
        {
            // Progress bar bookkeeping: value / max / "in progress" flag.
            _CPubVar._value_I = 0;
            _CPubVar._MaxValue_I = _dr.Count + 2;
            _CPubVar._IsTableProcess_bool = true;
            foreach (DataRow _row in _dr)
            {
                _CPubVar._value_I++;
                _CPubVar._ProcessText_S = "Preparing Tables " + _CPubVar._value_I + " of " + _CPubVar._MaxValue_I;
                x = _CObjectsofClasses._obj_CConvert._MConvertToString(_row[0]);
                // serverConn.Open();
                list.Add(x);
            }
        }
        return list;
    }

    // Provisions the scope "<table>_SCOP" on both the server and the client
    // database if it does not already exist. Tracking tables, triggers and
    // stored procedures are created (or reused); the base table itself is
    // never created on the server (Skip).
    public static void SetUp(string _pTableName)
    {
        // Connection to SQL Server database
        SqlConnection serverConn = new SqlConnection(_CObjectsofClasses._obj_CConnectionString._MGetServerConnectionString(500));
        // Connection to SQL client database
        SqlConnection clientConn = new SqlConnection(_CObjectsofClasses._obj_CConnectionString._MGetConnectionString(500));
        // Create a scope named "product" and add tables to it.
        DbSyncScopeDescription productScope = new DbSyncScopeDescription(_pTableName + "_SCOP");
        // Select the colums to be included in the Collection Object
        // Define the Products table.
        DbSyncTableDescription productDescription =
            SqlSyncDescriptionBuilder.GetDescriptionForTable(_pTableName,serverConn);
        // Add the Table to the scope object.
        productScope.Tables.Add(productDescription);
        // Create a provisioning object for "product" and apply it to the on-premise database if one does not exist.
        SqlSyncScopeProvisioning serverProvision = new SqlSyncScopeProvisioning(serverConn, productScope);
        serverProvision.ObjectSchema = ".dbo";
        //
        serverProvision.SetCreateProceduresForAdditionalScopeDefault(DbSyncCreationOption.Create);
        serverProvision.SetCreateTableDefault(DbSyncCreationOption.Skip);
        serverProvision.SetCreateProceduresDefault(DbSyncCreationOption.CreateOrUseExisting);
        serverProvision.SetCreateTrackingTableDefault(DbSyncCreationOption.CreateOrUseExisting);
        serverProvision.SetCreateTriggersDefault(DbSyncCreationOption.CreateOrUseExisting);
        if (!serverProvision.ScopeExists(_pTableName + "_SCOP"))
            serverProvision.Apply();
        // Provision the SQL client database from the on-premise SQL Server database if one does not exist.
        SqlSyncScopeProvisioning clientProvision = new SqlSyncScopeProvisioning(clientConn, productScope);
        if (!clientProvision.ScopeExists(_pTableName + "_SCOP"))
            clientProvision.Apply();
        // Shut down database connections.
        // NOTE(review): an exception above leaks both connections — a
        // using/try-finally would be safer.
        serverConn.Close();
        serverConn.Dispose();
        clientConn.Close();
        clientConn.Dispose();
    }

    // Runs one synchronization session for the given scope in the given
    // direction and returns its statistics (one _CSyncDetails entry).
    // NOTE(review): the server provider is the LOCAL provider and the client
    // the REMOTE one, so "Upload"/"Download" counts are relative to the server.
    public static List<_CSyncDetails> Synchronize(string _pScopeName, SyncDirectionOrder _pDirection)
    {
        // Connection to SQL Server database
        SqlConnection serverConn = new SqlConnection(_CObjectsofClasses._obj_CConnectionString._MGetServerConnectionString(500));
        // Connection to SQL client database
        SqlConnection clientConn = new SqlConnection(_CObjectsofClasses._obj_CConnectionString._MGetConnectionString(500));
        List<_CSyncDetails> _Statics = new List<_CSyncDetails>();
        // Perform Synchronization between SQL Server and the SQL client.
        SyncOrchestrator syncOrchestrator = new SyncOrchestrator();
        // Create provider for SQL Server
        SqlSyncProvider serverProvider = new SqlSyncProvider(_pScopeName, serverConn);
        // Set the command timeout and maximum transaction size for the SQL Azure provider.
        SqlSyncProvider clientProvider = new SqlSyncProvider(_pScopeName, clientConn);
        clientProvider.CommandTimeout = serverProvider.CommandTimeout = 500;
        //Set memory allocation to the database providers
        clientProvider.MemoryDataCacheSize = serverProvider.MemoryDataCacheSize = MemorySize;
        //Set application transaction size on destination provider.
        serverProvider.ApplicationTransactionSize = BatchSize;
        //Count transactions
        // NOTE(review): RemoteProvider_ChangesApplied is not defined in this
        // class as shown — it must exist elsewhere in the project.
        serverProvider.ChangesApplied += new EventHandler<DbChangesAppliedEventArgs>(RemoteProvider_ChangesApplied);
        // Set Local provider of SyncOrchestrator to the server provider
        syncOrchestrator.LocalProvider = serverProvider;
        // Set Remote provider of SyncOrchestrator to the client provider
        syncOrchestrator.RemoteProvider = clientProvider;
        // Set the direction of SyncOrchestrator session to Upload and Download
        syncOrchestrator.Direction = _pDirection;
        // Create SyncOperations Statistics Object
        SyncOperationStatistics syncStats = syncOrchestrator.Synchronize();
        _Statics.Add(new _CSyncDetails { UploadChangesTotal = syncStats.UploadChangesTotal, SyncStartTime = syncStats.SyncStartTime, DownloadChangesTotal = syncStats.DownloadChangesTotal, SyncEndTime = syncStats.SyncEndTime });
        // Display the Statistics
        // Shut down database connections.
        serverConn.Close();
        serverConn.Dispose();
        clientConn.Close();
        clientConn.Dispose();
        return _Statics;
    }
}
Here the function where I am sync
// Synchronizes every table returned by _MGetAllTableList, one scope per
// table, updating the progress UI as it goes. Errors for individual tables
// are collected in _syncErr and the loop continues with the next table.
private void _MSync()
{
    _CPubVar._IsContinue = true;
    _CPubVar._PausebtnCondition = 0;
    _CPubVar._Stop_bool = false;
    this.Text += " - Started at : " + DateTime.Now;
    string a = "";
    // Define the Products table.
    List<string> _Tablelist = new List<string>();
    _list1.Add(new _CSyncDetails { SyncStartTime = DateTime.Now });
    _Tablelist.AddRange(_cSynchronization._MGetAllTableList());
    SyncDirectionOrder _order = SyncDirectionOrder.Download;
    _CPubVar._MaxValue_I = (_Tablelist.Count * 2);
    _CPubVar._value_I = 0;
    // Tables that sync both ways even when the central server is not running.
    // FIX: replaces an unreadable 17-term '||' chain with a set lookup.
    HashSet<string> twoWayTables = new HashSet<string>
    {
        "_BANK", "_BANKACCOUNT", "_CLIENTNAME", "_PACKAGE", "_PACKAGEDET",
        "_PAYMENTEXPENCES", "_PROJECT", "_PROJECTDET", "_REQUIREMENT",
        "_REQUIREMENTDET", "_SERVER", "_UNIT", "_ITEM", "ManageUser",
        "USERPERMISSION", "USERROLE", "USERROLEDET"
    };
    foreach (string tbl in _Tablelist)
    {
        try
        {
            // FIX: the verbatim-string prefix is '@' ('#' does not compile).
            a = Regex.Replace(Environment.MachineName + Environment.UserName, @"[^0-9a-zA-Z]+", "").ToUpper() + "_" + tbl + "_SCOPE";
            // NOTE(review): 'a' is built with suffix "_SCOPE" but never used;
            // the calls below use tbl + "_SCOP" — confirm which was intended.
            _CPubVar._value_I++;
            _CPubVar._ProcessText_S = "Sync Tables " + _CPubVar._value_I + " of " + _CPubVar._MaxValue_I;
            _cSynchronization.SetUp(tbl);
            if (_CPubVar._IsServerRunning_bool)
            {
                _order = SyncDirectionOrder.DownloadAndUpload;
            }
            else
            {
                _order = twoWayTables.Contains(tbl)
                    ? SyncDirectionOrder.DownloadAndUpload
                    : SyncDirectionOrder.Download;
            }
            _CPubVar._value_I++;
            _CPubVar._ProcessText_S = "Sync Tables " + _CPubVar._value_I + " of " + _CPubVar._MaxValue_I;
            if (tbl != "_COMPANYINFO")
            {
                _list1.AddRange(_cSynchronization.Synchronize(tbl + "_SCOP", _order));
            }
            else
            {
                // _COMPANYINFO only syncs (both ways) while the server is running.
                if (_CPubVar._IsServerRunning_bool)
                {
                    _list1.AddRange(_cSynchronization.Synchronize(tbl + "_SCOP", SyncDirectionOrder.DownloadAndUpload));
                }
            }
        }
        catch (Exception exx)
        {
            // Record the failure, surface the error count in the UI, and move on.
            _syncErr.Add(new _CSyncErrors { SyncErrorDateTime = DateTime.Now, SyncErrorMessage = exx.Message, SyncTableAlies = _CTableName._MgetTableAlies(tbl) });
            pictureBox1.Visible = label3.Visible = true;
            label3.Text = _syncErr.Count.ToString();
            Application.DoEvents();
            continue;
        }
    }
    // NOTE(review): Thread.Abort is unreliable/obsolete — prefer a cooperative
    // cancellation flag; kept here to preserve existing behavior.
    thread.Abort();
}
Problem:
The above code works fine when only one PC syncs at a time (say, PC A) — there is no error and the sync completes.
Likewise, when only PC B syncs at a time, there is no error and the sync completes.
But when I run the application on both simultaneously (A and B at the same time), then for some tables I get:
Cannot enumerate changes at the RelationalSyncProvider for table 'TableName'
Running Status
PC A PC B Result
YES NO No Error
NO YES No Error
YES YES Error
Please note that on the client side my database is SQL Server 2008, and on the server side it is SQL Server 2012.
Where am I going wrong?
UPDATE :
I have 72 tables in database and below Error is not specific 'TableName' it may be any table from 72,
For Example Table1 gives me Error, and after Sync done if I rerun application may be this error not come.
Cannot enumerate changes at the RelationalSyncProvider for table 'TableName'
Check the Timeout
Eight minutes might not be enough time. Try increasing the synchronization command timeout to an absurd number to find out if that's the problem.
// Raise both providers' command timeouts (seconds) to an absurdly large value
// to rule timeouts out as the cause of the enumeration failure.
clientProvider.CommandTimeout = 3000;
serverProvider.CommandTimeout = 3000;
Turn on Tracing
Edit the app.config file for your application by adding the following system.diagnostics segment. It will log verbosely to C:\MySyncTrace.txt.
<configuration>
  <system.diagnostics>
    <!-- Sync Framework tracing: SyncTracer levels are 0=off .. 4=verbose. -->
    <switches>
      <!--4-verbose.-->
      <add name="SyncTracer" value="4" />
    </switches>
    <!-- autoflush=true writes each trace line immediately, so the log survives a crash. -->
    <trace autoflush="true">
      <listeners>
        <!-- Append all trace output to a plain text file. -->
        <add name="TestListener"
            type="System.Diagnostics.TextWriterTraceListener"
            initializeData="c:\MySyncTrace.txt"/>
      </listeners>
    </trace>
  </system.diagnostics>
</configuration>
My Recreation
I tried to recreate the error that you are experiencing. I created a simplified version of what you are trying to accomplish. It builds and successfully synchronizes two SqlExpress databases.
Unfortunately, I wasn't able to recreate the error. Here is the setup that I used and the test cases afterwards.
Mock Databases Create Script
-- Recreates the two mock databases used by the sync test:
-- SyncTestServer and SyncTestClient, each with Table1/Table2 and one seed row
-- so that a successful sync is observable in both directions.
USE Master;
GO
-- Drop and recreate the server-side database.
IF EXISTS (
SELECT *
FROM sys.databases
WHERE NAME = 'SyncTestServer'
)
DROP DATABASE SyncTestServer
GO
CREATE DATABASE SyncTestServer;
GO
CREATE TABLE SyncTestServer.dbo.Table1 (Column1 VARCHAR(50) PRIMARY KEY)
CREATE TABLE SyncTestServer.dbo.Table2 (Column1 VARCHAR(50) PRIMARY KEY)
-- Seed rows that should appear on the client after a sync.
INSERT INTO SyncTestServer.dbo.Table1 (Column1)
VALUES ('Server Data in Table1')
INSERT INTO SyncTestServer.dbo.Table2 (Column1)
VALUES ('Server Data in Table2')
-- Drop and recreate the client-side database.
IF EXISTS (
SELECT *
FROM sys.databases
WHERE NAME = 'SyncTestClient'
)
DROP DATABASE SyncTestClient
GO
CREATE DATABASE SyncTestClient;
GO
CREATE TABLE SyncTestClient.dbo.Table1 (Column1 VARCHAR(50) PRIMARY KEY)
CREATE TABLE SyncTestClient.dbo.Table2 (Column1 VARCHAR(50) PRIMARY KEY)
-- Seed rows that should appear on the server after a sync.
INSERT INTO SyncTestClient.dbo.Table1 (Column1)
VALUES ('Client Data in Table1')
INSERT INTO SyncTestClient.dbo.Table2 (Column1)
VALUES ('Client Data in Table2')
Mock Console Application
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using Microsoft.Synchronization.Data;
using Microsoft.Synchronization.Data.SqlServer;
using Microsoft.Synchronization;
namespace StackOverflow_SyncFramework
{
// Simple data holder for the statistics of one synchronization run,
// copied from SyncOperationStatistics in Synchronize().
public class _CSyncDetails
{
    public int UploadChangesTotal;      // changes uploaded during the session
    public DateTime SyncStartTime;      // when the session began
    public int DownloadChangesTotal;    // changes downloaded during the session
    public DateTime SyncEndTime;        // when the session finished
}
// Console entry point: runs one full table-by-table synchronization,
// then waits for a key press before exiting.
public class Program
{
    static void Main(string[] args)
    {
        var synchronizer = new _cSynchronization();
        synchronizer._MSync();
        Console.ReadLine();
    }
}
// Simplified, self-contained recreation of the asker's sync code, used to try
// to reproduce the "Cannot enumerate changes" error between two local
// SqlExpress databases (SyncTestServer / SyncTestClient).
public class _cSynchronization
{
    public static int transactionCount;   // incremented conceptually per applied batch
    public static uint BatchSize = 10000; // destination ApplicationTransactionSize
    public static uint MemorySize = 20000; // provider MemoryDataCacheSize

    // FIX: the verbatim-string prefix is '@' in C# — '#' does not compile.
    public const string ServerConnString =
        @"Data Source=.\SQLExpress;initial catalog=SyncTestServer;integrated security=True;MultipleActiveResultSets=True;";
    public const string ClientConnString =
        @"Data Source=.\SQLExpress;initial catalog=SyncTestClient;integrated security=True;MultipleActiveResultSets=True;";

    // Returns the fixed table list of the mock databases.
    public static List<string> _MGetAllTableList()
    {
        // I just created two databases that each have the following table
        // Synchronization is working
        List<string> list = new List<string>()
        {
            "Table1",
            "Table2"
        };
        return list;
    }

    // Provisions the scope "<table>_SCOP" on server and client if missing.
    public static void SetUp(string _pTableName)
    {
        // Connection to SQL Server database
        SqlConnection serverConn =
            new SqlConnection(ServerConnString);
        // Connection to SQL client database
        SqlConnection clientConn =
            new SqlConnection(ClientConnString);
        Console.WriteLine(_pTableName);
        DbSyncScopeDescription productScope = new DbSyncScopeDescription(_pTableName + "_SCOP");
        // Describe the table from the server-side schema.
        DbSyncTableDescription productDescription =
            SqlSyncDescriptionBuilder.GetDescriptionForTable(_pTableName, serverConn);
        productScope.Tables.Add(productDescription);
        // Provision the server: reuse existing tracking artifacts, never
        // create the base table itself (Skip).
        SqlSyncScopeProvisioning serverProvision = new SqlSyncScopeProvisioning(serverConn, productScope);
        serverProvision.ObjectSchema = ".dbo";
        serverProvision.SetCreateProceduresForAdditionalScopeDefault(DbSyncCreationOption.Create);
        serverProvision.SetCreateTableDefault(DbSyncCreationOption.Skip);
        serverProvision.SetCreateProceduresDefault(DbSyncCreationOption.CreateOrUseExisting);
        serverProvision.SetCreateTrackingTableDefault(DbSyncCreationOption.CreateOrUseExisting);
        serverProvision.SetCreateTriggersDefault(DbSyncCreationOption.CreateOrUseExisting);
        if (!serverProvision.ScopeExists(_pTableName + "_SCOP"))
            serverProvision.Apply();
        // Provision the client from the server description if missing.
        SqlSyncScopeProvisioning clientProvision = new SqlSyncScopeProvisioning(clientConn, productScope);
        if (!clientProvision.ScopeExists(_pTableName + "_SCOP"))
            clientProvision.Apply();
        // Shut down database connections.
        serverConn.Close();
        serverConn.Dispose();
        clientConn.Close();
        clientConn.Dispose();
    }

    // Runs one sync session for the scope and returns its statistics.
    public static List<_CSyncDetails> Synchronize(string _pScopeName, SyncDirectionOrder _pDirection)
    {
        // Connection to SQL Server database
        SqlConnection serverConn =
            new SqlConnection(ServerConnString);
        // Connection to SQL client database
        SqlConnection clientConn =
            new SqlConnection(ClientConnString);
        List<_CSyncDetails> _Statics = new List<_CSyncDetails>();
        SyncOrchestrator syncOrchestrator = new SyncOrchestrator();
        // Server is the LOCAL provider, client the REMOTE one.
        SqlSyncProvider serverProvider = new SqlSyncProvider(_pScopeName, serverConn);
        SqlSyncProvider clientProvider = new SqlSyncProvider(_pScopeName, clientConn);
        clientProvider.CommandTimeout = serverProvider.CommandTimeout = 500;
        //Set memory allocation to the database providers
        clientProvider.MemoryDataCacheSize = serverProvider.MemoryDataCacheSize = MemorySize;
        //Set application transaction size on destination provider.
        serverProvider.ApplicationTransactionSize = BatchSize;
        //Count transactions
        serverProvider.ChangesApplied +=
            new EventHandler<DbChangesAppliedEventArgs>(RemoteProvider_ChangesApplied);
        syncOrchestrator.LocalProvider = serverProvider;
        syncOrchestrator.RemoteProvider = clientProvider;
        syncOrchestrator.Direction = _pDirection;
        SyncOperationStatistics syncStats = syncOrchestrator.Synchronize();
        _Statics.Add(new _CSyncDetails
        {
            UploadChangesTotal = syncStats.UploadChangesTotal,
            SyncStartTime = syncStats.SyncStartTime,
            DownloadChangesTotal = syncStats.DownloadChangesTotal,
            SyncEndTime = syncStats.SyncEndTime
        });
        // Shut down database connections.
        serverConn.Close();
        serverConn.Dispose();
        clientConn.Close();
        clientConn.Dispose();
        return _Statics;
    }

    // Progress hook: fires once per applied change batch.
    private static void RemoteProvider_ChangesApplied(object sender, DbChangesAppliedEventArgs e)
    {
        Console.WriteLine("Changes Applied");
    }

    // Provisions and synchronizes every table in both directions.
    public void _MSync()
    {
        List<string> _Tablelist = new List<string>();
        _Tablelist.AddRange(_cSynchronization._MGetAllTableList());
        foreach (string tbl in _Tablelist)
        {
            SetUp(tbl);
            _cSynchronization.Synchronize(tbl + "_SCOP", SyncDirectionOrder.DownloadAndUpload);
        }
    }
}
}
Select Statements for Before and After Tests
-- Inspect all four tables before and after the sync to verify that each
-- side's seed row was copied to the other.
SELECT *
FROM SyncTestServer.dbo.Table1
SELECT *
FROM SyncTestServer.dbo.Table2
SELECT *
FROM SyncTestClient.dbo.Table1
SELECT *
FROM SyncTestClient.dbo.Table2
Before the Sync
This was the DB state before the sync.
After Sync
This was the state afterward. So, the sync appears to have worked.
Reattempted with a Remove Server and Three Concurrent Syncs
This is the connection string for the remote DB.
// FIX: the verbatim-string prefix is '@' ('#' does not compile). The Azure SQL
// login also takes the form user@server — its '@' was mangled to '#' as well.
public const string ServerConnString =
    @"data source=x0x0x0x0x0x.database.windows.net,1433;initial catalog=SyncTestServer01;user id=xoxoxox@xoxoxox;password=xoxoxoxoxox;MultipleActiveResultSets=True;";
This is the modification to mimic three concurrent synchronizations.
// Entry point that mimics three clients syncing concurrently, one background
// task per client database.
public class Program
{
    // FIX: the verbatim-string prefix is '@' in C# — '#' does not compile.
    public static string[] ClientConnString = new string[]
    {
        @"Data Source=.\SQLExpress;initial catalog=SyncTestClient01;integrated security=True;MultipleActiveResultSets=True;"
        ,@"Data Source=.\SQLExpress;initial catalog=SyncTestClient02;integrated security=True;MultipleActiveResultSets=True;"
        ,@"Data Source=.\SQLExpress;initial catalog=SyncTestClient03;integrated security=True;MultipleActiveResultSets=True;"
    };

    static void Main(string[] args)
    {
        foreach (var connString in ClientConnString)
        {
            // Copy the loop variable before capturing it in the lambda —
            // safe on all compiler versions (pre-C#5 foreach shared the variable).
            var localConn = connString;
            Action action = () =>
            {
                _cSynchronization sync = new _cSynchronization();
                sync._MSync(localConn);
            };
            Task.Factory.StartNew(action);
        }
        // Keep the process alive while the background syncs run.
        Console.ReadLine();
    }
}
I'm afraid I wasn't able to recreate the error that you experienced. Please turn on tracing and then post the tracing results to your questions. That way we can analyze the trace and see what's problematic.
I have an ASP.NET MVC 4 project with a database of students. I also have an old database in .xls format, and I need to migrate all values from the old database into the new SQL Server database. In my MVC project I have a membership controller which includes a method for registering a new student. I also wrote a console application which parses the .xls table and inserts all values into my new database via the Register method.
Console application:
static void Main(string[] args)
{
    // OLEDB connection string for reading the Excel workbook via the ACE provider.
    // Fixed: verbatim string uses the @" prefix; the leading '#' was a
    // markdown-rendering artifact and does not compile.
    string con = @"Provider=Microsoft.ACE.OLEDB.12.0;Data Source=D:\MigrateExelSql\Include\TestDb.xlsx; Extended Properties=Excel 12.0;";
    MembershipController mc = new MembershipController();
    // NOTE(review): one Student instance is reused for every spreadsheet row;
    // this is only safe if Register copies the values rather than keeping the
    // reference -- confirm against MembershipController.Register.
    Student student = new Student();
    student.Username = null;
    student.Password = "password";
    student.Email = null;
    student.EntryYear = 2014;
    student.PassportNumber = 0;
    student.IsApproved = true;
    student.PasswordFailuresSinceLastSuccess = 0;
    student.IsLockedOut = false;
    using (OleDbConnection connection = new OleDbConnection(con))
    {
        connection.Open();
        OleDbCommand command = new OleDbCommand("select * from [Sheet1$]", connection);
        using (OleDbDataReader dr = command.ExecuteReader())
        {
            while (dr.Read())
            {
                // Column 0 is used as the username, column 1 as the e-mail
                // (assumes that sheet layout -- verify against the workbook).
                string row1Col0 = dr[0].ToString();
                Console.WriteLine(row1Col0);
                string row1Col1 = dr[1].ToString();
                Console.WriteLine(row1Col1);
                Console.WriteLine();
                student.Username = row1Col0;
                student.Email = row1Col1;
                try
                {
                    mc.Register(student);
                }
                catch (Exception ex)
                {
                    // Log the failure and continue with the next row instead of
                    // aborting the whole import.
                    Console.WriteLine(ex.Message);
                }
            }
        }
    }
    Console.ReadKey();
}
Register method
/// <summary>
/// Creates a membership user and, on success, copies the first/last name onto
/// the matching Student row and (optionally) signs the user in.
/// </summary>
/// <returns>The <see cref="MembershipCreateStatus"/> reported by the provider.</returns>
public static MembershipCreateStatus Register(string Username, string Password, string Email, bool IsApproved, string FirstName, string LastName)
{
    MembershipCreateStatus CreateStatus;
    // Passing null for question/answer requires requiresQuestionAndAnswer="false"
    // on the membership provider in web.config.
    System.Web.Security.Membership.CreateUser(Username, Password, Email, null, null, IsApproved, null, out CreateStatus);
    if (CreateStatus == MembershipCreateStatus.Success)
    {
        using (UniversityContext Context = new UniversityContext(ConfigurationManager.ConnectionStrings[0].ConnectionString))
        {
            Student User = Context.Students.FirstOrDefault(Usr => Usr.Username == Username);
            // Fixed: FirstOrDefault returns null when no matching Student row
            // exists; the original dereferenced it unconditionally (NRE risk).
            if (User != null)
            {
                User.FirstName = FirstName;
                User.LastName = LastName;
                Context.SaveChanges();
            }
        }
        if (IsApproved)
        {
            // Sign the new user in with a non-persistent auth cookie.
            FormsAuthentication.SetAuthCookie(Username, false);
        }
    }
    return CreateStatus;
}
ActionResult POST
// POST action: registers the posted student via the static Register helper
// (always approved) and redirects back to the membership index.
// NOTE(review): the returned MembershipCreateStatus is discarded, so failures
// are silent here -- confirm that is intended.
public ActionResult Register(Student student)
{
return Register(student.Username, student.Password, student.Email, true, student.FirstName, student.LastName);
return RedirectToAction("Index", "Membership");
}
Everything works fine when I add students from the web application, but when I try to add them from my parsed database I get an error on the following line
System.Web.Security.Membership.CreateUser(Username, Password, Email, null, null, IsApproved, null, out CreateStatus);
in my Register method: invalidAnswer. I changed the null to "123456789"; then the next error was invalidQuestion, so I changed that from null to "123456789" as well. After this it tells me invalidPassword. I also added requiresQuestionAndAnswer="false" to my membership provider's line in web.config.
I have no idea why it's not working. Does anybody have any ideas?
Whenever I'm required to do anything like import data from spreadsheets like this simply write an Importer console app in my solution. I also use LINQ to CSV which you can get from Nuget and its a really useful tool to use.
It allows you to read in the data from your spreadsheet into the relevant class objects, and from there once you have validated each object you can simply create new objects in your Database using your Data Layer.
Since xls (not xlsx) files are basically text files in which contents are separated by usually a tab character (or maybe some certain special characters), a better approach would be to read all the lines in the file and store them in a list.
Then you can split each element at the time you are inserting them in your database. This can be easily done using a foreach loop like:
// Split each tab-delimited line into its columns, then insert the values.
// Fixed: `String s [] = ...` is not valid C# (brackets go on the type), and
// string.Split already returns string[], so ToArray() was redundant; also
// fixed the `lenght` typo -- the property is Length.
foreach (var line in xlsList) {
    string[] columns = line.Split('\t'); // refer to your file
    for (int j = 0; j < columns.Length; j++) {
        // TODO: run your parameterized SQL INSERT for columns[j] here
    }
}
I'm creating a program to generate my database schema using the SMO libraries distributed with SQL Server 2008.
I've gotten the scripter outputting code which is virtually the same as SQL Server Management Studio outputs when it's configured to output everything, but with one curious exception: it doesn't output comment headers for the foreign key constraints it generates at the bottom, whereas SSMS does. Can anyone figure out why this is? Here's my code:
// Scripts the schema of the database selected in cbChooseDb (tables, views,
// stored procedures, plus indexes/constraints/triggers via ScriptingOptions)
// and writes the result to the file named in txtExportToFile.
private void btnExportScript_Click(object sender, EventArgs ea) {
    Server srv = setupConnection();
    // Reference the database; bail out with a user-visible error if missing.
    if (!srv.Databases.Contains(cbChooseDb.SelectedItem.ToString())) {
        _utils.ShowError("Couldn't find DB '" + cbChooseDb.SelectedItem.ToString() + "'.");
        return;
    }
    Database db = srv.Databases[cbChooseDb.SelectedItem.ToString()];
    StringBuilder builder = new StringBuilder();
    try {
        Scripter scrp = new Scripter(srv);
        scrp.Options.AppendToFile = false;
        scrp.Options.ToFileOnly = false;
        scrp.Options.ScriptDrops = false; // Don't script DROPs
        scrp.Options.Indexes = true; // Include indexes
        scrp.Options.DriAllConstraints = true; // Include referential constraints in the script
        scrp.Options.Triggers = true; // Include triggers
        scrp.Options.FullTextIndexes = true; // Include full text indexes
        scrp.Options.NonClusteredIndexes = true; // Include non-clustered indexes
        scrp.Options.NoCollation = false; // Include collation
        scrp.Options.Bindings = true; // Include bindings
        scrp.Options.SchemaQualify = true; // Include schema qualification, eg. [dbo]
        scrp.Options.IncludeDatabaseContext = false;
        scrp.Options.AnsiPadding = true;
        scrp.Options.FullTextStopLists = true;
        scrp.Options.IncludeIfNotExists = false;
        scrp.Options.ScriptBatchTerminator = true;
        scrp.Options.ExtendedProperties = true;
        scrp.Options.ClusteredIndexes = true;
        scrp.Options.FullTextCatalogs = true;
        scrp.Options.SchemaQualifyForeignKeysReferences = true;
        scrp.Options.XmlIndexes = true;
        scrp.Options.IncludeHeaders = true;
        // Prefetching may speed things up
        scrp.PrefetchObjects = true;
        var urns = new List<Urn>();
        // Iterate through the tables in database and script each one.
        foreach (Table tb in db.Tables) {
            if (!tb.IsSystemObject) {
                // Table is not a system object, so add it.
                urns.Add(tb.Urn);
            }
        }
        // Iterate through the views in database and script each one.
        foreach (Microsoft.SqlServer.Management.Smo.View view in db.Views) {
            if (!view.IsSystemObject) {
                // View is not a system object, so add it.
                urns.Add(view.Urn);
            }
        }
        // Iterate through the stored procedures in database and script each one.
        foreach (StoredProcedure sp in db.StoredProcedures) {
            if (!sp.IsSystemObject) {
                // Procedure is not a system object, so add it.
                urns.Add(sp.Urn);
            }
        }
        // Start by manually adding DB context (IncludeDatabaseContext is off).
        builder.AppendLine("USE [" + db.Name + "]");
        builder.AppendLine("GO");
        System.Collections.Specialized.StringCollection sc = scrp.Script(urns.ToArray());
        foreach (string st in sc) {
            // It seems each string is a sensible batch, and putting GO after it makes it work in tools like SSMS.
            // Wrapping each string in an 'exec' statement would work better if using SqlCommand to run the script.
            builder.Append(st.Trim(new char[] { '\r', '\n' }) + "\r\nGO\r\n");
        }
    }
    catch (Exception ex) {
        showExceptionError("Couldn't generate script.", ex);
        return;
    }
    try {
        File.WriteAllText(txtExportToFile.Text, builder.ToString());
        _utils.ShowInfo("DB exported to script at: " + txtExportToFile.Text);
    }
    catch (Exception ex) {
        showExceptionError("Couldn't save script file.", ex);
        return;
    }
}
Note that foreign keys fall under the category of DRI constraints, and are scripted because of scrp.Options.DriAllConstraints = true;.
I have a solution here: Can't get EnumScript() to generate constraints
for some reason, Scripter won't emit DRI constraints (foreign keys, etc) when it's given a list of Urns, but it will if it's given one Urn at a time. The trick there is to give the Urns in the order of their ancestry: tables have to be defined before they're referenced by constraints. To do that I've used the DependencyWalker.
Here's a synopsis:
var urns = new List<Urn>();
Scripter schemaScripter = new Scripter(srv) { Options = schemaOptions };
Scripter insertScripter = new Scripter(srv) { Options = insertOptions };
var walker = new DependencyWalker(srv);
// Gather every user (non-system) table up front.
foreach (Table table in db.Tables)
{
    if (!table.IsSystemObject)
    {
        urns.Add(table.Urn);
    }
}
// Order the tables by dependency so referenced tables are scripted first.
DependencyTree tree = walker.DiscoverDependencies(urns.ToArray(), true);
DependencyCollection ordered = walker.WalkDependencies(tree);
foreach (var dep in ordered)
{
    // Script the schema one Urn at a time (Scripter emits DRI constraints this way).
    foreach (var schemaLine in schemaScripter.Script(new Urn[] { dep.Urn }))
    {
        strings.Add(schemaLine);
    }
    strings.Add("GO");
    if (scriptData)
    {
        int emitted = 0;
        foreach (var insertLine in insertScripter.EnumScript(new Urn[] { dep.Urn }))
        {
            strings.Add(insertLine);
            // Insert a batch separator every 100 statements to keep batches small.
            if (++emitted % 100 == 0)
            {
                strings.Add("GO");
            }
        }
    }
}
Note: adding a "GO" every so often keeps the batch size small so SSMS doesn't run out of memory.
Is it possible to set the connection string of crystal report from the properties.settings of a winform in c# like the following?
This is just my assumption:
rpt.connectionString = Properties.Settings.Default.ConnectionString
This is my code for managing logins/connectionstrings (dbLogin is just a simple class for storing information, you can replace that with string values).
//Somewhere in my code:
// Apply the stored credentials to every table of the loaded report document.
foreach (CrystalDecisions.CrystalReports.Engine.Table tbCurrent in rdCurrent.Database.Tables)
SetTableLogin(tbCurrent);
// Applies the stored dbLogin credentials to a single report table: read the
// table's current log-on info, overwrite it, and push it back via ApplyLogOnInfo.
private void SetTableLogin(CrystalDecisions.CrystalReports.Engine.Table table)
{
    CrystalDecisions.Shared.TableLogOnInfo logOnInfo = table.LogOnInfo;
    logOnInfo.ConnectionInfo.UserID = dbLogin.Username;
    logOnInfo.ConnectionInfo.Password = dbLogin.Password;
    if (dbLogin.Database != null)
    {
        // Database name is not needed for Oracle & MS Access
        logOnInfo.ConnectionInfo.DatabaseName = dbLogin.Database;
    }
    if (dbLogin.Server != null)
    {
        logOnInfo.ConnectionInfo.ServerName = dbLogin.Server;
    }
    table.ApplyLogOnInfo(logOnInfo);
}
In case you already have a successfull connectiont to SQL Server, you may use the following static method to set your report connection based on your SqlConnection:
using System.Text;
using CrystalDecisions.Shared;
using System.Data.SqlClient;
namespace StackOverflow
{
    public class MyCrystalReports
    {
        /// <summary>
        /// Points every table of <paramref name="MyReport"/> at the database
        /// described by <paramref name="MySqlConnection"/>'s connection string.
        /// </summary>
        /// <param name="MyReport">Loaded Crystal Reports document to re-target.</param>
        /// <param name="MySqlConnection">An ADO.NET connection whose connection string supplies server/database/credentials.</param>
        public static void SetSqlConnection(CrystalDecisions.CrystalReports.Engine.ReportClass MyReport, SqlConnection MySqlConnection)
        {
            // You may even test SqlConnection before using it.
            // Fixed: the original local was named exactly like its type
            // (SqlConnectionStringBuilder), shadowing the type name; renamed
            // for readability. Also bool/camelCase for locals.
            SqlConnectionStringBuilder builder = new SqlConnectionStringBuilder(MySqlConnection.ConnectionString);
            string serverName = builder.DataSource;
            string databaseName = builder.InitialCatalog;
            bool integratedSecurity = builder.IntegratedSecurity;
            string userId = builder.UserID;
            string password = builder.Password;
            // Of course, you may add extra settings here :D
            // On Crystal Reports, connection must be set individually for each table defined on the report document
            foreach (CrystalDecisions.CrystalReports.Engine.Table table in MyReport.Database.Tables)
            {
                CrystalDecisions.Shared.TableLogOnInfo logOnInfo = table.LogOnInfo;
                logOnInfo.ConnectionInfo.ServerName = serverName;
                logOnInfo.ConnectionInfo.DatabaseName = databaseName;
                logOnInfo.ConnectionInfo.IntegratedSecurity = integratedSecurity;
                if (!integratedSecurity)
                {
                    // SQL authentication only: credentials are ignored under integrated security.
                    logOnInfo.ConnectionInfo.UserID = userId;
                    logOnInfo.ConnectionInfo.Password = password;
                }
                table.ApplyLogOnInfo(logOnInfo);
            }
        }
    }
}