[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]

Re: [Bacula-devel] Director bug when using two storage daemons?


On Sat, Nov 15, 2008 at 09:51:05AM +0100, Kern Sibbald wrote:
> PS: you have done a good job creating a test. I wish more people would do so, 
> but there are just two more steps.  1. (a bit painful) is to convert your new 
> xxx-director.conf  into a xxx-director.conf.in by replacing all 
> the "variable" parameters by @xxx@ keywords.  Then 2. (trivial) adding one 
> new line in scripts/do_sed to cause the xxx-director.conf to be created using 
> your config file parameters by substituting them into the 
> xxx.bacula-dir.conf.in file.

OK, I've done that and the fixed patch is attached.
diff -Naur --exclude=test.out --exclude=working --exclude=build --exclude=bin --exclude=tmp --exclude=weird-files --exclude=docs --exclude=gui --exclude=bacula bacula/regress/scripts/broken-media-bug-bacula-dir.conf.in bacula.new2/regress/scripts/broken-media-bug-bacula-dir.conf.in
--- bacula/regress/scripts/broken-media-bug-bacula-dir.conf.in	1970-01-01 01:00:00.000000000 +0100
+++ bacula.new2/regress/scripts/broken-media-bug-bacula-dir.conf.in	2008-11-17 09:41:24.000000000 +0000
@@ -0,0 +1,134 @@
+# Configuration for the director for testing for the broken media bug.
+# Test written by Graham Keeling
+
+Director {                            # define myself
+  Name = @hostname@-dir
+  DIRPort = @dirport@                # where we listen for UA connections
+  QueryFile = "@scriptdir@/query.sql"
+  WorkingDirectory = "@working_dir@"
+  PidDirectory = "@piddir@"
+  SubSysDirectory = "@subsysdir@"
+  PluginDirectory = "@sbindir@"
+  Maximum Concurrent Jobs = 4
+  Password = "pNvX1WiXnwv2C/F7E52LGvw6rKjbbPvu2kyuPa9pVaL3"         # Console password
+  Messages = Standard
+}
+
+Job {
+  Name = "First"
+  Type = Backup
+  Client=@hostname@-fd 
+  FileSet="Set1"
+  Storage = File
+  Messages = Standard
+  Pool = Default
+  Write Bootstrap = "@working_dir@/NightlySave.bsr"
+  Maximum Concurrent Jobs = 4
+}
+
+Job {
+  Name = "Second"
+  Type = Backup
+  Client=@hostname@-fd 
+  FileSet="Set2"
+  Storage = File
+  Messages = Standard
+  Pool = Default
+  Write Bootstrap = "@working_dir@/NightlySave.bsr"
+  Maximum Concurrent Jobs = 4
+}
+
+# Standard Restore template, to be changed by Console program
+Job {
+  Name = "RestoreFiles"
+  Type = Restore
+  Client=@hostname@-fd 
+  FileSet="Set1"
+  Storage = File
+  Messages = Standard
+  Pool = Default
+  Where = @tmpdir@/bacula-restores
+}
+
+
+# List of files to be backed up
+FileSet {
+  Name = "Set1"
+  Include {  
+     Options { signature=MD5; }
+     File =  <@tmpdir@/file-list
+  }
+}
+
+FileSet {
+  Name = "Set2"
+  Include {  
+     Options { signature=MD5; }
+     File =  <@tmpdir@/file-list
+  }
+}
+
+
+# Client (File Services) to backup
+Client {
+  Name = @hostname@-fd
+  Address = @hostname@
+  FDPort = @fdport@
+  Catalog = MyCatalog
+  Password = "xevrjURYoCHhn26RaJoWbeWXEY/a3VqGKp/37tgWiuHc"          # password for FileDaemon
+  File Retention = 30d                # 30 days
+  Job Retention = 180d                # six months
+  AutoPrune = yes                     # Prune expired Jobs/Files
+  Maximum Concurrent Jobs = 4
+}
+
+# Definition of file storage device
+Storage {
+  Name = File
+  Address = @hostname@                # N.B. Use a fully qualified name here
+  SDPort = @sdport@
+  Password = "ccV3lVTsQRsdIUGyab0N4sMDavui2hOBkmpBU0aQKOr9"
+  Device = FileStorage
+  Media Type = File
+  Maximum Concurrent Jobs = 4
+}
+
+# Generic catalog service
+Catalog {
+  Name = MyCatalog
+  
+  dbname = @db_name@; user = @db_user@; password = "@db_password@"
+}
+
+# Reasonable message delivery -- send most everything to email address
+#  and to the console
+Messages {
+  Name = Standard
+  mailcommand = "@sbindir@/bsmtp -h localhost -f \"\(Bacula regression\) %r\" -s \"Regression: %t %e of %c %l\" %r"
+  operatorcommand = "@sbindir@/bsmtp -h localhost -f \"\(Bacula regression\) %r\" -s \"Regression: Intervention needed for %j\" %r"
+# MailOnError = @job_email@ = all
+# operator = @job_email@ = mount
+
+  console = all, !skipped, !terminate, !restored
+#
+# WARNING! the following will create a file that you must cycle from
+#          time to time as it will grow indefinitely. However, it will
+#          also keep all your messages if they scroll off the console.
+#
+  append = "@working_dir@/log" = all, !skipped
+  catalog = all, !skipped
+}
+    
+# Default pool definition
+Pool {
+  Name = Default
+  Pool Type = Backup
+  Recycle = yes                       # Bacula can automatically recycle Volumes
+  AutoPrune = yes                     # Prune expired volumes
+  Volume Retention = 365d             # one year
+  Maximum Volume Jobs = 1
+  Label Format = TestVolume
+  Maximum Volumes = 0
+}
diff -Naur --exclude=test.out --exclude=working --exclude=build --exclude=bin --exclude=tmp --exclude=weird-files --exclude=docs --exclude=gui --exclude=bacula bacula/regress/scripts/do_sed bacula.new2/regress/scripts/do_sed
--- bacula/regress/scripts/do_sed	2008-11-17 09:35:21.000000000 +0000
+++ bacula.new2/regress/scripts/do_sed	2008-11-17 09:40:38.000000000 +0000
@@ -39,6 +39,7 @@
 sed -f ${out} ${cwd}/scripts/bacula-sd-2disk.conf.in >${cwd}/scripts/bacula-sd-2disk.conf
 sed -f ${out} ${cwd}/scripts/bacula-sd-2drive.conf.in >${cwd}/scripts/bacula-sd-2drive.conf
 sed -f ${out} ${cwd}/scripts/bacula-sd-2disk-drive.conf.in >${cwd}/scripts/bacula-sd-2disk-drive.conf
+sed -f ${out} ${cwd}/scripts/broken-media-bug-bacula-dir.conf.in >${cwd}/scripts/broken-media-bug-bacula-dir.conf
 sed -f ${out} ${cwd}/scripts/cleanup-tape.in >${cwd}/scripts/cleanup-tape
 sed -f ${out} ${cwd}/scripts/cleanup-2tape.in >${cwd}/scripts/cleanup-2tape
 sed -f ${out} ${cwd}/scripts/cleanup-2drive.in >${cwd}/scripts/cleanup-2drive
diff -Naur --exclude=test.out --exclude=working --exclude=build --exclude=bin --exclude=tmp --exclude=weird-files --exclude=docs --exclude=gui --exclude=bacula bacula/regress/tests/broken-media-bug-test bacula.new2/regress/tests/broken-media-bug-test
--- bacula/regress/tests/broken-media-bug-test	1970-01-01 01:00:00.000000000 +0100
+++ bacula.new2/regress/tests/broken-media-bug-test	2008-11-17 09:42:12.000000000 +0000
@@ -0,0 +1,63 @@
+#!/bin/sh
+#
+# Show the broken media bug. Test by Graham Keeling.
+#
+TestName="broken-media-bug-test"
+. scripts/functions
+
+cwd=`pwd`
+scripts/cleanup
+/bin/cp -f scripts/broken-media-bug-bacula-dir.conf bin/bacula-dir.conf
+/bin/cp -f scripts/test-bacula-sd.conf bin/bacula-sd.conf
+/bin/cp -f scripts/test-bacula-fd.conf bin/bacula-fd.conf
+/bin/cp -f scripts/test-console.conf bin/bconsole.conf
+hugefile=${cwd}/build/hugefile
+hugefilesize=300
+echo "${cwd}/build" >${cwd}/tmp/file-list
+
+start_test
+
+echo "Creating huge ${hugefilesize}M file..."
+dd if=/dev/urandom of="$hugefile" bs=1M count="$hugefilesize"
+echo "Done"
+
+cat >tmp/bconcmds <<END_OF_DATA
+setdebug level=150  storage=File
+setdebug level=150  Director
+messages
+@$out tmp/log1.out
+run job=First yes
+messages
+quit
+END_OF_DATA
+
+run_bacula
+
+# Give the first job a bit of time to get going.
+sleep 5
+
+cat >tmp/bconcmds <<END_OF_DATA
+setdebug level=150  storage=File
+setdebug level=150  Director
+list volumes
+llist volume=TestVolume0001
+messages
+@$out tmp/log2.out
+run job=Second yes
+wait
+messages
+restore fileset=Set1 where=${cwd}/tmp/bacula-restores select all storage=File done
+yes
+wait
+messages
+quit
+END_OF_DATA
+
+run_bconsole
+check_for_zombie_jobs storage=File
+stop_bacula
+
+check_two_logs
+check_restore_diff
+rm -f "$hugefile"
+end_test
-------------------------------------------------------------------------
This SF.Net email is sponsored by the Moblin Your Move Developer's challenge
Build the coolest Linux based applications with Moblin SDK & win great prizes
Grand prize is a trip for two to an Open Source event anywhere in the world
http://moblin-contest.org/redirect.php?banner_id=100&url=/
_______________________________________________
Bacula-devel mailing list
Bacula-devel@xxxxxxxxxxxxxxxxxxxxx
https://lists.sourceforge.net/lists/listinfo/bacula-devel


This mailing list archive is a service of Copilot Consulting.