Search Results

Search found 5779 results on 232 pages for 'backup restoration'.

Page 41/232 | < Previous Page | 37 38 39 40 41 42 43 44 45 46 47 48  | Next Page >

  • add a form to a backup routine

    - by Gerard Flynn
    Hi, how do you put a progress bar and button onto this code? I have a class and want to add a GUI on top of the code:

        using System;
        using System.Collections.Generic;
        using System.ComponentModel;
        using System.Data;
        using System.Drawing;
        using System.Text;
        using System.Windows.Forms;
        using System.Data.SqlClient;
        using System.IO;
        using System.Threading;
        using Tamir.SharpSsh;
        using System.Security.Cryptography;
        using ICSharpCode.SharpZipLib.Checksums;
        using ICSharpCode.SharpZipLib.Zip;
        using ICSharpCode.SharpZipLib.GZip;

        namespace backup
        {
            public partial class Form1 : Form
            {
                public Form1()
                {
                    InitializeComponent();
                }

                /// <summary>
                /// Summary description for Class1.
                /// </summary>
                public class Backup
                {
                    private string dbName;
                    private string dbUsername;
                    private string dbPassword;
                    private static string baseDir;
                    private string backupName;
                    private static bool isBackup;
                    private string keyString;
                    private string ivString;
                    private string[] backupDirs = new string[0];
                    private string[] excludeDirs = new string[0];
                    private ZipOutputStream zipOutputStream;
                    private string backupFile;
                    private string zipFile;
                    private string encryptedFile;

                    static void Main()
                    {
                        Backup.Log("BackupUtility loaded");
                        try
                        {
                            new Backup();
                            if (!isBackup) MessageBox.Show("Restore complete");
                        }
                        catch (Exception e)
                        {
                            Backup.Log(e.ToString());
                            if (!isBackup) MessageBox.Show("Error restoring!\r\n" + e.Message);
                        }
                    }

                    private void LoadAppSettings()
                    {
                        this.backupName = System.Configuration.ConfigurationSettings.AppSettings["BackupName"].ToString();
                        this.dbName = System.Configuration.ConfigurationSettings.AppSettings["DBName"].ToString();
                        this.dbUsername = System.Configuration.ConfigurationSettings.AppSettings["DBUsername"].ToString();
                        this.dbPassword = System.Configuration.ConfigurationSettings.AppSettings["DBPassword"].ToString();
                        //default to using where we are executing this assembly from
                        Backup.baseDir = System.Reflection.Assembly.GetExecutingAssembly().Location.Substring(0, System.Reflection.Assembly.GetExecutingAssembly().Location.LastIndexOf("\\")) + "\\";
                        Backup.isBackup = bool.Parse(System.Configuration.ConfigurationSettings.AppSettings["IsBackup"].ToString());
                        this.keyString = System.Configuration.ConfigurationSettings.AppSettings["KeyString"].ToString();
                        this.ivString = System.Configuration.ConfigurationSettings.AppSettings["IVString"].ToString();
                        this.backupDirs = GetSetting("BackupDirs", ',');
                        this.excludeDirs = GetSetting("ExcludeDirs", ',');
                    }

                    private string[] GetSetting(string settingName, char delimiter)
                    {
                        if (System.Configuration.ConfigurationSettings.AppSettings[settingName] != null)
                        {
                            string settingVal = System.Configuration.ConfigurationSettings.AppSettings[settingName].ToString();
                            if (settingVal.Length > 0) return settingVal.Split(delimiter);
                        }
                        return new string[0];
                    }

                    public Backup()
                    {
                        this.LoadAppSettings();
                        if (isBackup)
                            this.DoBackup();
                        else
                            this.DoRestore();
                        Log("Finished");
                    }

                    private void DoRestore()
                    {
                        System.Windows.Forms.OpenFileDialog fileDialog = new System.Windows.Forms.OpenFileDialog();
                        fileDialog.Title = "Choose .encrypted file";
                        fileDialog.Filter = "Encrypted files (*.encrypted)|*.encrypted|All files (*.*)|*.*";
                        fileDialog.InitialDirectory = Backup.baseDir;
                        if (fileDialog.ShowDialog() == System.Windows.Forms.DialogResult.OK)
                        {
                            //string encryptedFile = GetFileName("encrypted");
                            string encryptedFile = fileDialog.FileName;
                            string decryptedFile = this.GetDecryptedFilename(encryptedFile);
                            //string originalFile = GetFileName("original");
                            this.Decrypt(encryptedFile, decryptedFile);
                            //this.UnzipFile(decryptedFile, originalFile);
                        }
                    }

                    //use the same filename as the backup except replace ".encrypted" with ".decrypted.zip"
                    private string GetDecryptedFilename(string encryptedFile)
                    {
                        string name = encryptedFile.Substring(0, encryptedFile.LastIndexOf("."));
                        name += ".decrypted.zip";
                        return name;
                    }

                    private void DoBackup()
                    {
                        this.backupFile = GetFileName("bak");
                        this.zipFile = GetFileName("zip");
                        this.encryptedFile = GetFileName("encrypted");
                        this.DeleteFiles();
                        this.zipOutputStream = new ZipOutputStream(File.Create(zipFile));
                        try
                        {
                            //backup database first
                            if (this.dbName.Length > 0)
                            {
                                this.BackupDB(backupFile);
                                this.ZipFile(backupFile, this.GetName(backupFile));
                            }
                            //zip any directories specified in config file
                            this.ZipUserSpecifiedFilesAndDirectories(this.backupDirs);
                        }
                        finally
                        {
                            this.zipOutputStream.Finish();
                            this.zipOutputStream.Close();
                        }
                        this.Encrypt(zipFile, encryptedFile);
                        this.SCPFile(encryptedFile);
                        this.DeleteFiles();
                    }

                    /// <summary>
                    /// Deletes any files created by the backup process, namely the DB backup file,
                    /// the zip of all files backed up, and the encrypted zip file
                    /// </summary>
                    private void DeleteFiles()
                    {
                        File.Delete(this.backupFile);
                        File.Delete(this.zipFile);
                        ///File.Delete(this.encryptedFile);
                    }

                    private void ZipUserSpecifiedFilesAndDirectories(string[] fileNames)
                    {
                        foreach (string fileName in fileNames)
                        {
                            string name = fileName.Trim();
                            if (name.Length > 0)
                            {
                                Log("Zipping " + name);
                                this.ZipFile(name, this.GetNameFromDir(name));
                            }
                        }
                    }

                    private void SCPFile(string inputPath)
                    {
                        string sshServer = System.Configuration.ConfigurationSettings.AppSettings["SSHServer"].ToString();
                        string sshUsername = System.Configuration.ConfigurationSettings.AppSettings["SSHUsername"].ToString();
                        string sshPassword = System.Configuration.ConfigurationSettings.AppSettings["SSHPassword"].ToString();
                        if (sshServer.Length > 0 && sshUsername.Length > 0 && sshPassword.Length > 0)
                        {
                            Scp scp = new Scp(sshServer, sshUsername, sshPassword);
                            //Copy a file from local machine to remote SSH server
                            scp.Connect();
                            Log("Connected to " + sshServer);
                            //scp.Put(inputPath, "/home/wal/temp.txt");
                            scp.Put(inputPath, GetName(inputPath));
                            scp.Close();
                        }
                        else
                        {
                            Log("Not SCP as missing login details");
                        }
                    }

                    private string GetName(string inputPath)
                    {
                        FileInfo info = new FileInfo(inputPath);
                        return info.Name;
                    }

                    private string GetNameFromDir(string inputPath)
                    {
                        DirectoryInfo info = new DirectoryInfo(inputPath);
                        return info.Name;
                    }

                    private static void Log(string msg)
                    {
                        try
                        {
                            string toLog = DateTime.Now.ToString() + ": " + msg;
                            System.Diagnostics.Debug.WriteLine(toLog);
                            System.IO.FileStream fs = new System.IO.FileStream(baseDir + "app.log", System.IO.FileMode.OpenOrCreate, System.IO.FileAccess.ReadWrite);
                            System.IO.StreamWriter m_streamWriter = new System.IO.StreamWriter(fs);
                            m_streamWriter.BaseStream.Seek(0, System.IO.SeekOrigin.End);
                            m_streamWriter.WriteLine(toLog);
                            m_streamWriter.Flush();
                            m_streamWriter.Close();
                            fs.Close();
                        }
                        catch (Exception e)
                        {
                            Console.WriteLine(e.ToString());
                        }
                    }

                    private byte[] GetFileBytes(string path)
                    {
                        FileStream stream = new FileStream(path, FileMode.Open);
                        byte[] bytes = new byte[stream.Length];
                        stream.Read(bytes, 0, bytes.Length);
                        stream.Close();
                        return bytes;
                    }

                    private void WriteFileBytes(byte[] bytes, string path)
                    {
                        FileStream stream = new FileStream(path, FileMode.Create);
                        stream.Write(bytes, 0, bytes.Length);
                        stream.Close();
                    }

                    private void UnzipFile(string inputPath, string outputPath)
                    {
                        ZipInputStream zis = new ZipInputStream(File.OpenRead(inputPath));
                        ZipEntry theEntry = zis.GetNextEntry();
                        FileStream streamWriter = File.Create(outputPath);
                        int size = 2048;
                        byte[] data = new byte[2048];
                        while (true)
                        {
                            size = zis.Read(data, 0, data.Length);
                            if (size > 0)
                            {
                                streamWriter.Write(data, 0, size);
                            }
                            else
                            {
                                break;
                            }
                        }
                        streamWriter.Close();
                        zis.Close();
                    }

                    private bool ExcludeDir(string dirName)
                    {
                        foreach (string excludeDir in this.excludeDirs)
                        {
                            if (dirName == excludeDir) return true;
                        }
                        return false;
                    }

                    private void ZipFile(string inputPath, string zipName)
                    {
                        FileAttributes fa = File.GetAttributes(inputPath);
                        if ((fa & FileAttributes.Directory) != 0)
                        {
                            string dirName = zipName + "/";
                            ZipEntry entry1 = new ZipEntry(dirName);
                            this.zipOutputStream.PutNextEntry(entry1);
                            string[] subDirs = Directory.GetDirectories(inputPath);
                            //create directories first
                            foreach (string subDir in subDirs)
                            {
                                DirectoryInfo info = new DirectoryInfo(subDir);
                                string name = info.Name;
                                if (this.ExcludeDir(name))
                                    Log("Excluding " + dirName + name);
                                else
                                    this.ZipFile(subDir, dirName + name);
                            }
                            //then store files
                            string[] fileNames = Directory.GetFiles(inputPath);
                            foreach (string fileName in fileNames)
                            {
                                FileInfo info = new FileInfo(fileName);
                                string name = info.Name;
                                this.ZipFile(fileName, dirName + name);
                            }
                        }
                        else
                        {
                            Crc32 crc = new Crc32();
                            this.zipOutputStream.SetLevel(6); // 0 - store only to 9 - means best compression
                            FileStream fs = null;
                            try
                            {
                                fs = File.OpenRead(inputPath);
                            }
                            catch (IOException ioEx)
                            {
                                Log("WARNING! " + ioEx.Message); //might be in use, skip file in this case
                            }
                            if (fs != null)
                            {
                                byte[] buffer = new byte[fs.Length];
                                fs.Read(buffer, 0, buffer.Length);
                                ZipEntry entry = new ZipEntry(zipName);
                                entry.DateTime = DateTime.Now;
                                // set Size and the crc, because the information
                                // about the size and crc should be stored in the header
                                // if it is not set it is automatically written in the footer.
                                // (in this case size == crc == -1 in the header)
                                // Some ZIP programs have problems with zip files that don't store
                                // the size and crc in the header.
                                entry.Size = fs.Length;
                                fs.Close();
                                crc.Reset();
                                crc.Update(buffer);
                                entry.Crc = crc.Value;
                                this.zipOutputStream.PutNextEntry(entry);
                                this.zipOutputStream.Write(buffer, 0, buffer.Length);
                            }
                        }
                    }

                    private void Encrypt(string inputPath, string outputPath)
                    {
                        RijndaelManaged rijndaelManaged = new RijndaelManaged();
                        byte[] encrypted;
                        byte[] toEncrypt;
                        //Create a new key and initialization vector.
                        //myRijndael.GenerateKey();
                        //myRijndael.GenerateIV();
                        /*des.GenerateKey();
                        des.GenerateIV();
                        string temp1 = Convert.ToBase64String(des.Key);
                        string temp2 = Convert.ToBase64String(des.IV);*/
                        //Get the key and IV.
                        byte[] key = Convert.FromBase64String(keyString);
                        byte[] IV = Convert.FromBase64String(ivString);
                        //Get an encryptor.
                        ICryptoTransform encryptor = rijndaelManaged.CreateEncryptor(key, IV);
                        //Encrypt the data.
                        MemoryStream msEncrypt = new MemoryStream();
                        CryptoStream csEncrypt = new CryptoStream(msEncrypt, encryptor, CryptoStreamMode.Write);
                        //Convert the data to a byte array.
                        toEncrypt = this.GetFileBytes(inputPath);
                        //Write all data to the crypto stream and flush it.
                        csEncrypt.Write(toEncrypt, 0, toEncrypt.Length);
                        csEncrypt.FlushFinalBlock();
                        //Get encrypted array of bytes.
                        encrypted = msEncrypt.ToArray();
                        WriteFileBytes(encrypted, outputPath);
                    }

                    private void Decrypt(string inputPath, string outputPath)
                    {
                        RijndaelManaged myRijndael = new RijndaelManaged();
                        //DES des = new DESCryptoServiceProvider();
                        byte[] key = Convert.FromBase64String(keyString);
                        byte[] IV = Convert.FromBase64String(ivString);
                        byte[] encrypted = this.GetFileBytes(inputPath);
                        byte[] fromEncrypt;
                        //Get a decryptor that uses the same key and IV as the encryptor.
                        ICryptoTransform decryptor = myRijndael.CreateDecryptor(key, IV);
                        //Now decrypt the previously encrypted message using the decryptor
                        //obtained in the above step.
                        MemoryStream msDecrypt = new MemoryStream(encrypted);
                        CryptoStream csDecrypt = new CryptoStream(msDecrypt, decryptor, CryptoStreamMode.Read);
                        fromEncrypt = new byte[encrypted.Length];
                        //Read the data out of the crypto stream.
                        int bytesRead = csDecrypt.Read(fromEncrypt, 0, fromEncrypt.Length);
                        byte[] readBytes = new byte[bytesRead];
                        Array.Copy(fromEncrypt, 0, readBytes, 0, bytesRead);
                        this.WriteFileBytes(readBytes, outputPath);
                    }

                    private string GetFileName(string extension)
                    {
                        return baseDir + backupName + "_" + DateTime.Now.ToString("yyyyMMdd") + "." + extension;
                    }

                    private void BackupDB(string backupPath)
                    {
                        string sql = @"DECLARE @Date VARCHAR(300), @Dir VARCHAR(4000)
        --Get today date
        SET @Date = CONVERT(VARCHAR, GETDATE(), 112)
        --Set the directory where the back up file is stored
        SET @Dir = '";
                        sql += backupPath;
                        sql += @"'
        --create a 'device' to write to first
        EXEC sp_addumpdevice 'disk', 'temp_device', @Dir
        --now do the backup
        BACKUP DATABASE " + this.dbName;
                        sql += @" TO temp_device WITH FORMAT
        --Drop the device
        EXEC sp_dropdevice 'temp_device'
        ";
                        //Console.WriteLine("sql="+sql);
                        Backup.Log("Starting backup of " + this.dbName);
                        ExecuteSQL(sql);
                    }

                    /// <summary>
                    /// Executes the specified SQL
                    /// Returns true if no errors were encountered during execution
                    /// </summary>
                    /// <param name="procedureName"></param>
                    private void ExecuteSQL(string sql)
                    {
                        SqlConnection conn = new SqlConnection(this.GetDBConnectString());
                        try
                        {
                            SqlCommand comm = new SqlCommand(sql, conn);
                            conn.Open();
                            comm.ExecuteNonQuery();
                        }
                        finally
                        {
                            conn.Close();
                        }
                    }

                    private string GetDBConnectString()
                    {
                        StringBuilder builder = new StringBuilder();
                        builder.Append("Data Source=127.0.0.1; User ID=");
                        builder.Append(this.dbUsername);
                        builder.Append("; Password=");
                        builder.Append(this.dbPassword);
                        builder.Append("; Initial Catalog=");
                        builder.Append(this.dbName);
                        builder.Append(";Connect Timeout=30");
                        return builder.ToString();
                    }
                }
            }
        }
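
    One way to wire a GUI onto this, sketched under assumptions (not from the original post): a BackgroundWorker keeps the UI responsive while Backup runs, and a ProgressBar shows activity. It assumes Form1 has a ProgressBar named progressBar1 and a Button named btnBackup added in the designer; both names are hypothetical, and for a bar that moves during the run the Backup class would also need to report intermediate progress (for example via a callback) rather than just start and end.

        // Inside Form1; progressBar1 and btnBackup are hypothetical designer controls.
        private void btnBackup_Click(object sender, EventArgs e)
        {
            BackgroundWorker worker = new BackgroundWorker();
            worker.WorkerReportsProgress = true;
            worker.DoWork += (s, args) =>
            {
                worker.ReportProgress(10);   // illustrative: work has started
                new Backup();                // runs the whole backup/restore off the UI thread
                worker.ReportProgress(100);  // illustrative: work has finished
            };
            worker.ProgressChanged += (s, args) =>
                progressBar1.Value = args.ProgressPercentage;
            worker.RunWorkerCompleted += (s, args) =>
            {
                btnBackup.Enabled = true;
                if (args.Error != null)
                    MessageBox.Show(args.Error.Message);
            };
            btnBackup.Enabled = false;
            worker.RunWorkerAsync();
        }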

    Read the article

  • Can't get rsync over sftp to work

    - by Patrik
    I'm trying to set up a backup system from an Ubuntu server to a Synology NAS (DS413j) using rsync and sftp. I have created a user for this that we can call ubuntu-backup. I have a directory in ubuntu-backup's home directory called www where the backup will be saved. I have enabled Network Backup in DSM. The user ubuntu-backup has full access to its home directory. Here is my rsync config file on the Synology NAS:

        #motd file = /etc/rsyncd.motd
        #log file = /var/log/rsyncd.log
        pid file = /var/run/rsyncd.pid
        lock file = /var/run/rsync.lock
        use chroot = no

        [NetBackup]
        path = /var/services/NetBackup
        comment = Network Backup Share
        uid = root
        gid = root
        read only = no
        list = yes
        charset = utf-8
        auth users = root
        secrets file = /etc/rsyncd.secrets

        [ubuntu-backup]
        path = /volume1/homes/ubuntu-backup/www
        comment = Ubuntu Backup
        uid = ubuntu-backup
        gid = users
        read only = false
        auth users = ubuntu-backup
        secrets file = /etc/rsyncd.secrets

    The permissions on /volume1/homes/ubuntu-backup/www are ubuntu-backup:users 777. Here is the command I'm running:

        rsync -aHvhiPb /var/www/ [email protected]:./

    The result:

        sending incremental file list
        ERROR: module is read only
        rsync error: syntax or usage error (code 1) at main.c(1034) [Receiver=3.0.9]
        rsync: connection unexpectedly closed (9 bytes received so far) [sender]
        rsync error: error in rsync protocol data stream (code 12) at io.c(605) [sender=3.0.9]

    If I run this:

        rsync -aHvhiPb /var/www/ [email protected]

    it looks like it's sending files, with no errors, but I can't find anything on the NAS.
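
    A hedged aside (not part of the original question): an rsync destination with no colon at all, as in the second command, is treated as a local path, so those files most likely went into a local directory named after the user@host string rather than to the NAS. And since the NAS config defines the share as an rsync daemon module, one thing worth trying, assuming the [ubuntu-backup] module is the intended target and with the NAS address as a placeholder, is the daemon double-colon syntax:

        rsync -aHvhiPb /var/www/ ubuntu-backup@<nas-address>::ubuntu-backup/

    With that syntax rsync authenticates against /etc/rsyncd.secrets on the NAS instead of going through sftp.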

    Read the article

  • lftp make-backup not functioning as expected

    - by Felipe Alvarez
    With default settings, when 'putting' a file, it is clobbered without complaint or warning. When 'getting', lftp complains:

        get: super.sh: file already exists and xfer:clobber is unset

    I changed my /etc/lftp.conf and appended:

        set xfer:make-backup yes
        set xfer:clobber yes

    When putting and getting, the files get clobbered, however no backup is made. I've checked the settings with "set -a | grep clob" and "set -a | grep backup" and the values are correct.

    Read the article

  • CentOS Backup BASH Script

    - by user1062058
    I just wrote this script for backing up everything into a tar.gz file. Does it look okay? How can I get the tar file to transfer itself over to another server after executing? FTP from itself? I'm going to put this script into a weekly cron.

        #!/bin/bash
        rm ~/backup.tar.gz #removes old backup
        BACKUP_DIRS=$HOME #$HOME is builtin, it goes to /home/ and all child dirs
        tar -cvzf backup.tar.gz $BACKUP_DIRS
        # run tar -zxvf to extract backup.tar.gz

    Thanks.
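
    A hedged sketch (not from the original post) of one way to push the archive off the box after it is created, assuming key-based ssh access to a remote machine; the host backuphost and the /backups/ path are illustrative:

        #!/bin/bash
        set -e
        cd ~                                  # pin paths instead of relying on cron's working directory
        rm -f backup.tar.gz                   # -f: a missing old backup is not an error
        tar -czf backup.tar.gz "$HOME"
        scp backup.tar.gz user@backuphost:/backups/
        # rsync -az backup.tar.gz user@backuphost:/backups/   # alternative to scp

    scp (or rsync over ssh) avoids putting plain FTP credentials in the crontab, and the copy simply runs as the last step of the same weekly job.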

    Read the article

  • Windows server backup question

    - by serveradminguy
    Hi, is it possible to make a backup of my Windows Server with the built-in Microsoft tool such that, as long as my Hyper-V machines are stored safely elsewhere and are not part of that backup, I can still restore my Windows Server from the native backup and use the Hyper-V machines? So if I lost my C:\ and my VMs are stored remotely, could I restore from an earlier backup and use those VMs?
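
    A hedged illustration (not from the original post) of the kind of native backup being asked about, assuming the system volume is C: and a dedicated backup disk is mounted as E:; the drive letters are illustrative:

        wbadmin start backup -backupTarget:E: -include:C: -allCritical -quiet

    Restoring C:\ from such a backup leaves anything stored outside the backed-up volumes, such as remotely stored VM files, untouched; the VMs would then need to be re-attached or re-imported in Hyper-V.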

    Read the article

  • How do I restore my system from a "Backup and Restore Center" backup?

    - by Daniel R Hicks
    The Windows (Vista) documentation and available online info are comprehensively vague. If I have a moderately brain-dead system and want to restore it, and I have a "Backup and Restore Center" backup whose "delta" is not quite a week old (but with a "full backup" behind it), what steps do I go through to recover my box back to that backup point? It's totally unclear whether simply doing "restore all" from the (advanced) "Center" is sufficient, or whether I need to first take the box back to day zero with the system restore DVD, et al. (Just editing this to get my correct ID associated with it.)

    Read the article

  • Powershell (sqlps) lastbackupdate not changing despite having run a sqlserver backup

    - by user1666376
    I'm using PowerShell to check last backup times across all our SQL Server databases. This seems to work really well, but I've got a question. If I run this (a cut-down version of the actual script):

        dir SQLSERVER:\SQL\Server1\default\databases | select parent, name, lastbackupdate

    I get:

        Parent    Name                LastBackupDate
        ------    ----                --------------
        [Server1] ADBA                10/09/2012 21:15:37
        [Server1] ReportServer        10/09/2012 21:00:17
        [Server1] ReportServerTempDB  10/09/2012 21:00:18
        [Server1] db1                 10/09/2012 21:15:35

    If I then run a SQL backup of the Server1 default instance and run the same query, the last backup date doesn't change:

        PS C:\temp> dir SQLSERVER:\SQL\Server1\default\databases | select parent, name, lastbackupdate

        Parent    Name                LastBackupDate
        ------    ----                --------------
        [Server1] ADBA                10/09/2012 21:15:37
        [Server1] ReportServer        10/09/2012 21:00:17
        [Server1] ReportServerTempDB  10/09/2012 21:00:18
        [Server1] db1                 10/09/2012 21:15:35

    ..but if I open a new PowerShell window, it shows the backup I just took:

        PS SQLSERVER:\> dir SQLSERVER:\SQL\Server1\default\databases | select parent, name, lastbackupdate

        Parent    Name                LastBackupDate
        ------    ----                --------------
        [server1] ADBA                12/09/2012 09:03:23
        [server1] ReportServer        12/09/2012 08:48:03
        [server1] ReportServerTempDB  12/09/2012 08:48:04
        [server1] db1                 12/09/2012 09:03:21

    My guess is that this is expected behaviour, but could anybody show me where it's documented/explained? I just want to understand what's going on. This is running the sqlps which came with 2008, against a 2008 instance. Thanks, Matt
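
    A hedged aside (not part of the original question): the SQL Server provider surfaces SMO objects, and SMO caches property values for the lifetime of the session, which would explain why only a fresh window sees the new date. Assuming that is the cause, forcing a refresh in the same session might look like this, reusing the server name from the question:

        dir SQLSERVER:\SQL\Server1\default\databases |
            ForEach-Object { $_.Refresh(); $_ } |
            Select-Object parent, name, lastbackupdate

    SMO's Refresh() re-reads the object's properties from the server rather than serving them from the session cache.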

    Read the article

  • SQL Server: "This filegroup cannot be used as a backup destination" error when attempting restore

    - by Ariel
    When running a command like the following:

        RESTORE FILELISTONLY FROM DISK='\\server\folder\DummyDB.bak'

    I'm getting this error:

        Backup destination "\\server" supports a FILESTREAM filegroup. This filegroup cannot be used as a backup destination.
        Rerun the BACKUP statement with a valid backup destination.
        RESTORE FILELIST is terminating abnormally.

    Unless someone comes up with a better idea, it seems that the drive from which the restore is being attempted must not contain any database file contained in a filegroup. Is that the case? Thanks in advance.

    Read the article

  • rsync over ssh backup failing after relocation of server

    - by OlduvaiHand
    I've got two FreeBSD machines set up; one serves video data and the other is the backup for the first. At this point I've got around 4TB of data. I add files to the video server a few at a time, and was planning to use rsync over ssh to keep the backup machine up to date. I did the initial, large backup with both machines hooked up to the same subnet at the lab with no problems using rsync. Then, when I moved the backup machine off-site (but still on the university network), I attempted a sync without changing anything other than the IP (as the machine is now on a different subnet) and got the following error:

        2010/03/22 15:55:21 [1260] rsync: connection unexpectedly closed (6340840244 bytes received so far) [receiver]
        2010/03/22 15:55:21 [1260] rsync error: error in rsync protocol data stream (code 12) at io.c(601) [receiver=3.0.7]
        2010/03/22 15:55:21 [1258] rsync: connection unexpectedly closed (60 bytes received so far) [generator]
        2010/03/22 15:55:21 [1258] rsync error: unexplained error (code 255) at io.c(601) [generator=3.0.7]

    The script that handles the backup hasn't been changed, nor has the crontab that invokes it. Does anyone have any ideas about what might be causing the hiccup? I was under the impression that it might have something to do with the ssh connection timing out or something along those lines, but am not entirely clear on how to diagnose the cause of the problem.
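
    A hedged diagnostic sketch (not from the original post): if the ssh session is being dropped by an intermediate firewall between the subnets, client-side keepalives sometimes help; the host alias is illustrative:

        # ~/.ssh/config on the sending machine
        Host backuphost
            ServerAliveInterval 60    # send a keepalive every 60 s
            ServerAliveCountMax 5     # give up after 5 unanswered keepalives

        # or per-invocation, without editing the config:
        rsync -az -e "ssh -o ServerAliveInterval=60" /data/ backuphost:/data/

    Running the failing rsync with -vv (or ssh with -vvv) would also show whether the drop happens at the transport layer.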

    Read the article

  • Could not retrieve backup settings for primary ID in Log shipping

    - by user1723139
    I am doing log shipping between two Amazon EC2 instances running Windows Server 2008 R2 with SQL Server 2008 R2 Standard Edition. Both instances are in the same domain and I can access the shared folders between the instances. The SQL Server service account and agent service account are both running under a domain account. When I activate log shipping (with standby-mode restore on the secondary server), the initial backup gets restored on the secondary. After that, the backup operation fails and I get the following error message:

        *** Error: Could not retrieve backup settings for primary ID 'xxxxxx-xxxx-xxxx-xxxx-4d772cd7337e'.(Microsoft.SqlServer.Management.LogShipping) ***
        *** Error: Failed to connect to server IP-0A7653F2.(Microsoft.SqlServer.ConnectionInfo) ***
        *** Error: A network-related or instance-specific error occurred while establishing a connection to SQL Server. The server was not found or was not accessible. Verify that the instance name is correct and that SQL Server is configured to allow remote connections. (provider: Named Pipes Provider, error: 40 - Could not open a connection to SQL Server)(.Net SqlClient Data Provider) ***
        ----- END OF TRANSACTION LOG BACKUP -----

    Any ideas?
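
    A hedged first step (not from the original post), assuming the failure is purely a connectivity one: test whether the primary can be reached by the name the error reports, since the message names the machine IP-0A7653F2; the server name comes from the error text:

        REM from the secondary server, using Windows authentication
        sqlcmd -S IP-0A7653F2 -E -Q "SELECT @@SERVERNAME"

    If that fails, checking that TCP/IP or Named Pipes is enabled in SQL Server Configuration Manager, and that the EC2 security groups allow SQL Server traffic (port 1433 by default) between the instances, would be the next things to rule out.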

    Read the article

  • Backup Exec 12.5 or 2010? [closed]

    - by Chris Thorpe
    Backup Exec 2010 has just dropped, and I'm about to implement a new BEWS infrastructure, complete with CALs and new central servers. When I specced this up last year, I ignored 2010 and focused on Backup Exec 12.5, since it's a mature product. In my previous experience, major releases of BE had numerous technical issues and seemed to improve significantly at the first service pack. However, our refresh cycle on the backup infrastructure is slow, the main driver usually being lack of support for some new server type (in this case, ESX has driven our current upgrade need). With this in mind, I'm wondering if Backup Exec 2010 should be my first choice, as it'll last longer under current support than 12.5, which will approach EOL soon. Has anyone got any perspective they could add to this? Right now, I'm leaning towards biting the bullet and going with 2010.

    Read the article

  • BTrFS subvolume / snapshot question

    - by bumbling fool
    I think I'm having difficulty fully understanding subvolumes and snapshots. The /home partition is btrfs. I want to create a "backup" snapshot of /home/user (for example), but user has existed for years (the filesystem was previously converted from ext4 with btrfs-convert). I believe you can only make a snapshot of a subvolume. I checked, and there are no "default" subvolumes already present. 1) Is there another way for me to back up /home/user other than creating a subvolume /home/user2 and copying everything from user to user2 in order to snapshot it?
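
    A hedged sketch (not from the original post) of the subvolume-then-snapshot approach described above, with illustrative names:

        # create a subvolume alongside the existing directory
        btrfs subvolume create /home/user2
        # copy the data in; on btrfs, --reflink=always shares extents instead of duplicating data
        cp -a --reflink=always /home/user/. /home/user2/
        # take a read-only snapshot of the subvolume as the backup
        btrfs subvolume snapshot -r /home/user2 /home/user2-backup-$(date +%F)

    The reflinked copy costs very little extra space on btrfs, which takes some of the sting out of not being able to snapshot a plain directory.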

    Read the article

  • Cloud storage that works with rsnapshot?

    - by humbledude
    I’ve started using rsnapshot as my backup system for my home PC. I really like the idea of hard links and how they are handled. But I can’t find the best workflow. Currently I keep my snapshots on the same partition and will copy the newest snapshot to a pen-drive at the end of the week. Cloud storage is what I’m looking for. Dropbox doesn’t fit my needs, because there is no way to make Dropbox respect hard links — all snapshots are treated as full snapshots. Renting a server is pretty expensive, so my question is, are there better alternatives for backup in the cloud? I would like to benefit from hard links and send only incremental backups, just like I do with my local host.

    Read the article

  • Exchange backup size larger than file size

    - by bladefist
    My Backup Exec is set up to integrate with Exchange, backing up the information store rather than just backing up the data file path. My Exchange mdbdata folder is 17 gigs, but Backup Exec is backing up 40 gigs worth of data. I have gone through it a million times, and it's strictly backing up the Exchange information store. I deleted all my backups and started over, to clear out the old incremental backup data. Where is all this extra data coming from?

    Read the article

  • server 2008 r2 - wbadmin systemstatebackup - system writer not found in the backup

    - by TWood
    I am trying to manually run a systemstatebackup command on my Server 2008 R2 box and I am getting error code '2155347997' when I view the backup event log details. The command line tells me that I have log files written to the c:\windows\logs\windowsserverbackup\ path, but I have no files of the .log type there. My command window tells me "System Writer is not found in the backup." However, when I run vssadmin list writers I find System Writer in the list, and it shows normal status with no last errors stored. I am running this from an elevated command prompt as well as from a logged-on administrator account. My backup target path has permission for Network Service to have full control, and it has plenty of free space. Looking in the event log I have two VSS error 8194 entries that happen immediately before the Backup error 517, which has the error code 2155347997 listed. All three of these errors are a result of trying to run the command for the systemstatebackup. It's my belief that some VSS-related permission is failing and exiting the backup process before it ever gets started. Because of this, the initial code that creates the log files must not be running, and this is why I have no files. When running the systemstatebackup command from the command prompt and watching the windowsserverbackup directory, I do see that I have a Wbadmin.0.etl file which gets created, but it is deleted when the backup errors out and stops. I have looked online and there are numerous opinions as to the cause of this error. These are the things I have corrected to try and fix this issue before posting here: The machine runs an HP 1410i smart array controller but at one time also used an LSI SCSI card; I used networkadminkb.com's KB a467 to find one LSI_SCSI entry in HKLM\System\CurrentControlSet\Services whose Start value was set to 0x0, and I modified it to 0x3. No changes. In HKLM\System\CurrentControlSet\Services\VSS\Diag I gave Network Service full control where it previously only had "Special Permission". No changes. I followed KB2009272 to manually try to fix System Writer. These are all of the things I have tried. What else should I look at to resolve this issue? It may be important to note that I run Mozy Pro on this server, and that was known in the past to use VSS for copying operations and occasionally threw an error. However, since an update last year those error event log entries have stopped.
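
    A hedged check (not from the original post) that can help with this class of failure: capture the writer state immediately after a failed run rather than before it, since writer status can change only when the snapshot is actually attempted; the target drive letter is illustrative:

        wbadmin start systemstatebackup -backupTarget:E: -quiet
        vssadmin list writers

    Comparing the list output before and after the failing run shows whether System Writer drops out only under load, which would point at the writer's host process rather than at permissions on the target.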

    Read the article

  • Can only connect to file server on second attempt

    - by Ross Fleming
    I have a FreeNAS file server on my local network and I usually connect to it from Windows and Ubuntu computers. Ever since I upgraded from Ubuntu 12.04 to 12.10, Ubuntu will only connect on the second attempt. By which I mean: I browse to it via the file manager, and once I click on the link in "Bookmarks" it complains that it could not connect. If I then try again, it connects successfully and keeps the connection up until the laptop is suspended or loses connection to the LAN for whatever reason. This isn't much of a problem, as I don't mind having to click twice, but my real problem is that this means my scheduled backup will complain that it cannot connect to the storage device if it has not already been accessed during the current session. Is there some way to either stop the issue altogether, or to force the backup tool (the default one) to immediately make a second attempt at connecting?
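
    A hedged workaround idea (not from the original question): warm the connection up just before the scheduled backup runs, so the first failed attempt is absorbed by a script rather than by the backup tool; the share URL is illustrative, and gvfs-mount is the 12.10-era GVFS mount helper:

        #!/bin/bash
        # try the mount twice, mirroring the manual click-twice behaviour
        for attempt in 1 2; do
            gvfs-mount smb://freenas/backups && exit 0
            sleep 5
        done
        exit 1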

    Read the article

  • Why are Back In Time snapshots so large?

    - by Chethan S.
    I just backed up the contents of my home partition onto my external hard drive using Back In Time. I browsed to the backed-up contents on the external drive, and under Properties it showed me the size as 9.6 GB. Since I had read that for subsequent snapshots Back In Time does not back everything up again, but creates hard links to older content and saves only newer content, I wanted to test it. So I copied two small files into my home partition and ran 'Take Snapshot' again. The operation completed within a minute: first it checked the previous snapshot, assessed the changes, detected the two new files and synced them. After this, when I browsed to the backed-up contents, I was surprised to see the newer and older backups taking up 9.6 GB each. Isn't this a waste of hard drive space? Or did I interpret something wrongly?
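
    A hedged way (not from the original question) to see what the snapshots really cost on disk: file managers count a hard-linked file once per snapshot, while du counts each inode only once per invocation; the snapshot directory names below are illustrative:

        du -sh /media/external/backintime/.../20130101-000001
        # then measure both snapshots together; the second should add almost nothing
        du -shc /media/external/backintime/.../20130101-000001 \
                /media/external/backintime/.../20130102-000001

    If the combined total is close to 9.6 GB rather than 19.2 GB, the hard linking is working and only the file manager's accounting is misleading.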

    Read the article

  • Alternative Windows Offline Files + Windows Backup + Previous Version Setup

    - by Herson
    Currently our documents are all hosted on a Windows 7 box. Users can access the files using Windows shares, and the documents are available offline (a Windows 7 feature). The documents are backed up daily by the Windows 7 Backup and Restore utility. Users can access previous versions of a file (from the backups) using Windows Explorer's "previous versions" feature. This setup is currently working well, except for the following: We would prefer to have access to hourly versions of the files, not daily. The previous-versions mechanism is tied to the backup mechanism: Windows 7 performs a full backup every week and an incremental backup every day, and the previous versions of a file are actually whatever is available in the backups. If you have 20GB of documents and want to maintain at least three (3) years of history, you will use at minimum 3 years * 52 weeks * 20GB, or about 3TB, even if there are few changes to the documents. That is a pretty inefficient use of space. Looking up previous versions of a file is very slow (tens of minutes); this is probably related to the previous issue, since Windows has to traverse all of its backups. I am considering using SVN + autocommit/autoupdate with TortoiseSVN. It would have the following advantages: Backups are easy and will also back up the whole history of each document (just back up the repository). Creating previous versions can be frequent; I think svn commit / update can be done every two minutes or so. Users can sync over the net. However, I can see the following issues: More conflicts than the original setup, because multiple users can now edit the same file even when both are online, i.e. can connect to the SVN repo. The users can of course lock a file first before editing, but that would mean they have to adjust. Delay in propagation of file changes: with Windows 7 file sharing, changes made by one online user are instantaneously available to other online users, whereas with the SVN setup changes are only propagated when the users execute the svn add/commit/update sequence, so the delay will probably be a few minutes. This workflow will no longer work: "Hi, I just edited document X, can you have a quick look?" I would like to ask the opinion of the community for alternative setups, or improvements on the above setups to work out the kinks.
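
    A hedged sketch (not from the original post) of the autocommit idea being considered, as a script run every few minutes by Task Scheduler; the working-copy path and commit message are illustrative:

        @echo off
        rem hypothetical autocommit loop for a checked-out documents folder
        cd /d D:\Documents-WC
        svn update --accept postpone
        svn add --force . --quiet
        svn commit -m "autocommit"

    svn's --accept postpone leaves conflicted files marked for a human to resolve, which is the main thing this workflow would have to plan for.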

    Read the article

  • Domain controller failed to restore using windows backup tools

    - by Peilin
    One issue: a Windows Server 2008 R2 domain controller with full daily backups (the only domain controller in this company) is out of service due to a hardware issue. I have tried the two methods below to recover onto the newly purchased server, but both fail. 1) First method: boot from the Windows 2008 R2 CD and carry out a recovery from backup. Everything is OK, but after reboot it comes up with a blue screen and restarts again. 2) Second method: a) Install the OS on the new server. b) Reboot the server into DSRM. c) Use the Windows backup tools to restore the system state only. After reboot, the blue screen error comes up and it restarts again. I know this may be a different-hardware issue, but how do I resolve it? Or can we restore only the AD services rather than the whole system state? Any suggestions?
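
    A hedged note (not from the original post) on what the second method typically looks like from an elevated prompt in DSRM, with illustrative version and target values; this does not by itself address the blue screen, which on dissimilar hardware is commonly a storage-driver problem:

        wbadmin get versions -backupTarget:E:
        wbadmin start systemstaterecovery -version:04/30/2013-09:00 -backupTarget:E: -quiet

    Matching the new server's storage controller (or injecting its driver) before the restored system first boots is usually the decisive step when moving to different hardware.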

    Read the article

  • How to exclude directories from Mozy custom backup sets using spotlight queries

    - by bromfiets
    I would like to create custom backup sets for Mozy which exclude certain directories. For example, I would like to back up my iTunes folder, but exclude all podcasts. I have created a backup set which searches in /Users/me/Music and used this query kMDItemPath == "*Podcasts*"wc to exclude all matching files. However, nothing matches. Queries which use the kMDItemFSName Spotlight attribute work fine, but any query using kMDItemPath doesn't seem to work at all. What am I doing wrong?
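
    A hedged way (not from the original post) to test the query outside Mozy, using the same Spotlight query syntax that mdfind accepts; the paths are taken from the question and the .mp3 pattern is illustrative:

        mdfind -onlyin /Users/me/Music 'kMDItemPath == "*Podcasts*"wc'
        # compare with an attribute that is known to be indexed:
        mdfind -onlyin /Users/me/Music 'kMDItemFSName == "*.mp3"wc'

    If the first command returns nothing while files clearly live under a Podcasts folder, that suggests kMDItemPath is not queryable in the Spotlight index at all, matching the behaviour seen in Mozy.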

    Read the article

  • Why do Hyper-V and Windows Backup crash (BSOD) after a successful backup?

    - by Payson Welch
    Hello, I am running Server 2008 R2 with a handful of Hyper-V guest nodes. If Windows Backup runs without any of the Hyper-V nodes running, the server is fine. If the backup runs while the Hyper-V nodes are running, it is fine until a few minutes after the backup completes, and then it BSODs. The storage location for the backup is iSCSI. I am wondering if anyone has any input on what might be causing this? I don't have the Hyper-V nodes set up on a VLAN, and there is only one NIC on the server. Is it possible this is a networking / driver issue, and if so, how would I reconfigure the networking to fix this?

    Read the article

  • Extracting httpdocs from Plesk Panel 9.5.4 Webserver backup file

    - by Paddington
    Good day, I am having problems manually extracting domains from a Plesk 9.5 backup that was FTPed onto my backup server. I have followed the article http://kb.parallels.com/en/1757 using method 2. The problem is here:

        zcat DUMP_FILE.gz > DUMP_FILE

    My backup file CP_1204131759.tar is a tar archive, and zcat does not work with it. So I proceed to run the command:

        cat CP_1204131759.tar > CP_1204131759

    But when I try

        # cat CP_1204131759 | munpack

    I get an error that munpack did not find anything to read from standard input. I went on to extract the tar backup file using the xvf flags and got a lot of files (20) similar to these ones:

        CP_sapp-distrib.7686-0_1204131759.tgz
        CP_sapp-distrib.7686-35_1204131759.tgz
        CP_sapp-distrib.7686-6_1204131759.tgz

    How best can I extract the httpdocs of a domain from this server-wide Plesk 9.5.4 backup?
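
    A hedged sketch (not from the original post) of digging further into those pieces: each CP_sapp-distrib.*.tgz is itself an archive, so listing contents before extracting helps locate the domain's httpdocs; the file names echo the question and the grep pattern is illustrative:

        mkdir extracted && cd extracted
        tar -xvf ../CP_1204131759.tar
        # inspect each inner archive for a document root
        for f in CP_sapp-distrib.*.tgz; do
            tar -tzf "$f" | grep -m1 httpdocs && echo "-> found in $f"
        done
        tar -xzf CP_sapp-distrib.7686-0_1204131759.tgz   # then extract the matching one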

    Read the article

  • Issue with Windows Server backup

    - by mamu
    I have Windows Server 2008 R2 installed; the only service running on it is Hyper-V. I am trying to take a backup using the Windows Server Backup feature and it fails with the following error in the event log:

        The backup operation that started at '2009-08-22T18:42:14.123000000Z' has failed because the Volume Shadow Copy Service operation to create a shadow copy of the volumes being backed up failed with following error code '2155348129'. Please review the event details for a solution, and then rerun the backup operation once the issue is resolved.

    The error itself points to other event logs for more detail, but I can't find anything in the event logs. I then ran the following command:

        vssadmin list writers

    It had the following out-of-the-ordinary entry in the list:

        Writer name: 'Microsoft Hyper-V VSS Writer'
           Writer Id: {66841cd4-6ded-4f4b-8f17-fd23f8ddc3de}
           Writer Instance Id: {d15c5f78-121c-464f-b23b-f285e919b05c}
           State: [8] Failed
           Last error: Inconsistent shadow copy

    How could I resolve this?
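
    A hedged first step (not from the original question) that is commonly tried when a VSS writer is stuck in a failed state: restart the service that hosts the writer and re-check; for the Hyper-V VSS writer that is the Hyper-V Virtual Machine Management service:

        net stop vmms
        net start vmms
        vssadmin list writers

    If the writer returns to a stable state with no last error, rerunning the backup shows whether the failure was transient or is reproduced by snapshotting the running guests.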

    Read the article

  • Error while taking Transaction log backup

    - by Divya Kapoor
    Hello, I have scheduled a transaction log backup, but the backup is not happening. The error in the logs is this:

        Transaction Log Backup.Subplan_1,Error,0,ARCOTDB1\ARCOT_DB_INST1,Transaction Log Backup.Subplan_1,(Job outcome),,The job failed. Unable to determine if the owner (ARCOT-DB1\Superuser) of job Transaction Log Backup.Subplan_1 has server access (reason: Could not obtain information about Windows NT group/user 'ARCOT-DB1\Superuser', error code 0x534. [SQLSTATE 42000] (Error 15404))

    Please help!
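
    A hedged workaround (not from the original post) for error 15404, which occurs when SQL Server Agent cannot resolve the Windows account that owns the job: change the job owner to a SQL login such as sa; the job name is taken from the error text above, and whether this is appropriate depends on local policy:

        EXEC msdb.dbo.sp_update_job
            @job_name = N'Transaction Log Backup.Subplan_1',
            @owner_login_name = N'sa';

    The underlying cause is usually that the SQL Server or Agent service account cannot query Active Directory for the owner's account, so fixing that lookup is the longer-term alternative to reassigning ownership.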

    Read the article

< Previous Page | 37 38 39 40 41 42 43 44 45 46 47 48  | Next Page >