Mar 04 2016
 

Installing on CentOS 6

I need PostgreSQL 9.3 on my CentOS 6 box

yum localinstall https://download.postgresql.org/pub/repos/yum/9.3/redhat/rhel-6-x86_64/pgdg-centos93-9.3-2.noarch.rpm

yum list postgres*
yum install postgresql93-server
service postgresql-9.3 initdb

chkconfig postgresql-9.3 on
service postgresql-9.3 start
service postgresql-9.3 status
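
A quick sanity check that the server is really up, run as the postgres system user (which the default ident/peer auth still allows at this point):

su - postgres -c "psql -c 'SELECT version();'"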

 

Logging in to the database

sudo -i -u postgres
psql --username=postgres -l 
# list all databases on psql prompt
\list or \l
# list all tables in the current database
\dt
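
While you are on the psql prompt you can also create an application role and database; the names and password below are just examples:

CREATE ROLE myapp WITH LOGIN PASSWORD 'secret';
CREATE DATABASE myapp_db OWNER myapp;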

 

Enabling password auth

sudo vi /var/lib/pgsql/9.3/data/pg_hba.conf

Find the lines that look like this, near the bottom of the file:

# pg_hba.conf excerpt (original)
host    all             all             127.0.0.1/32            ident
host    all             all             ::1/128                 ident

Then replace “ident” with “md5”, so they look like this:

# pg_hba.conf excerpt (updated)
host    all             all             127.0.0.1/32            md5
host    all             all             ::1/128                 md5

Save and exit. After a reload (shown below), PostgreSQL will allow password authentication on TCP connections.
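
One caveat: set a password for the postgres role before relying on md5 auth, while the local socket still lets you in, then reload and test ('xxxxx' is a placeholder):

su - postgres -c "psql -c \"ALTER USER postgres WITH PASSWORD 'xxxxx'\""
service postgresql-9.3 reload
psql --username=postgres --host=127.0.0.1 -l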

Extensions

If you need extensions, like the RegExp-optimized index extension pg_trgm, you are going to need the contrib package (postgresql93-contrib for this PGDG 9.3 install). Once you have it installed, run:

$ sudo -u postgres sh
$ psql puppetdb -c 'create extension pg_trgm'
$ exit
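
To double-check, \dx on the psql prompt lists installed extensions (puppetdb being the example database above):

$ sudo -u postgres psql puppetdb -c '\dx'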

 

File-based backup

service postgresql stop
tar -czf /backup/full_postgres_backup-`date +%Y%m%d`.tar.gz /var/lib/pgsql/data
service postgresql start

Single DB dump, for example the Spacewalk database:

su - postgres -c 'pg_dump rhnschema |gzip -c > /backup/rhnschema_postgres_backup-`date +%Y%m%d`.sql.gz'
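
Restoring such a dump is the reverse; the date in the filename is a placeholder and the target database must already exist:

su - postgres -c "gunzip -c /backup/rhnschema_postgres_backup-YYYYMMDD.sql.gz | psql rhnschema"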

Backing up from cron

Moar scripts here https://github.com/zmielna/backup_scripts

# crontab -l
01 3 * * * /root/bin/postgresql_dump.sh 2>&1 |logger

vim /root/bin/postgresql_dump.sh

#!/bin/bash
#############################################################################
# Simple script to create a snapshot of a PostgreSQL databases.
# Can be run from cron like that
# 01 3 * * * /root/bin/postgresql_dump.sh 2>&1 |logger
# 
# Send bugreports, fixes, enhancements, t-shirts, money, beer & pizza to [email protected]
#############################################################################

#------------ variables
# Directory to store backups in
DST=/backup/dbback_postgresql
# DATABASES="postgres rhnschema"
# auto-discover every non-template database from psql's expanded listing
DATABASES=`su - postgres -c "psql --username=postgres -l -x"|grep Name|grep -v template|cut -d"|" -f2|xargs`

# Any backups older than this will be deleted first
KEEPDAYS=7
DATE=$(date  +%Y-%m-%d)
#------------ code
/bin/logger "Starting PostgreSQL Dump....."
# cd $DST
find ${DST} -type f -mtime +${KEEPDAYS} -exec rm -f {} \;
rmdir $DST/* 2>/dev/null
mkdir -p ${DST}/${DATE}
chown postgres. ${DST}/${DATE}
for db in $DATABASES ; do
        echo -n "Backing up ${db}... " | logger
	su - postgres -c  "pg_dump ${db} |gzip -c > ${DST}/${DATE}/${db}-`date +%Y%m%d`.sql.gz"
        echo -n "Done with $db." | logger
done
/bin/logger "OK, all PostgreSQL dumps done in $DST"
Jul 17 2015
 

Intro

It took me a while to figure out an optimal configuration for a tape library with two streamers used with the Bacula backup software.

The exact model of the tape library in use is a Quantum Scalar i40 with two LTO-5 streamers. It is hooked up directly to the main NFS server (so heavy backup traffic goes via localhost only), a server that runs only the bacula-sd and bacula-fd services. The Bacula director runs on a separate, dedicated backup server.

Currently there are around 20 other servers connected to this system as clients, with various daily Incremental, weekly Differential and monthly Full backup jobs scheduled for execution.

Some additional info about this setup is in a previous post. Config files below:

Relevant config files from the backup server


/etc/bacula/bacula-dir.conf

Director {  
  Name = prod-backup-dir
  QueryFile = "/etc/bacula/scripts/query.sql"
  WorkingDirectory = "/var/lib/bacula"
  PidDirectory = "/var/run/bacula"
  Password = "xxxxx"
  Messages = Daemon
  DirAddress = prod-backup.domain.com
  Maximum Concurrent Jobs = 20
}
@/etc/bacula/JobDefs/JobDefs.conf
@|"sh -c 'cat /etc/bacula/Job/*'"
@|"sh -c 'cat /etc/bacula/FileSet/*'"
@|"sh -c 'cat /etc/bacula/Schedule/*'"
@|"sh -c 'cat /etc/bacula/Clients-enabled/*'"
@|"sh -c 'cat /etc/bacula/Storage/*'"
@|"sh -c 'cat /etc/bacula/Pool/*'"
Catalog {
  Name = MyCatalog
  dbaddress = prod-db.domain.com ;
  dbname = "bacula"; dbuser = "bacula"; dbpassword = "xxxxx"
}
Messages {
  Name = Standard
  mailcommand = "/usr/lib/bacula/bsmtp -h prod-mailhub.domain.com -f \"\(Bacula\) \<%r\>\" -s \"Bacula: %t %e of %c %l\" %r"
  operatorcommand = "/usr/lib/bacula/bsmtp -h prod-mailhub.domain.com  -f \"\(Bacula\) \<%r\>\" -s \"Bacula: Intervention needed for %j\" %r"
  mail = [email protected] = all, !skipped            
  operator = [email protected] = mount
  console = all, !skipped, !saved
  append = "/var/lib/bacula/log" = all, !skipped
  catalog = all
}
Messages {
  Name = Daemon
  mailcommand = "/usr/lib/bacula/bsmtp -h localhost -f \"\(Bacula\) \<%r\>\" -s \"Bacula client %c job %n exit code %e  \" %r"
  mail = [email protected] = all, !skipped            
  console = all, !skipped, !saved
  append = "/var/lib/bacula/log" = all, !skipped
}
Console {
  Name = prod-backup-mon
  Password = "xxxxxxxxxxx"
  CommandACL = status, .status
}
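
With this many @-included fragments it is worth syntax-checking the assembled configuration before restarting the director; the -t flag only parses the config and exits:

bacula-dir -t -c /etc/bacula/bacula-dir.conf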

Example job definition /etc/bacula/Job/Studies2010-1.conf

#----------------------------------
Job {
  Name = Studies2010-1
  Type = Backup
  Client = nfs-prod-fd
  Schedule = MonthlyCycle
  Messages = Daemon
  FileSet = Studies2010-1
  Level = Full
  Pool = lto5-pool
  Priority = 12
  Max Run Time = 1555200 # default limit is 6 days, 518400sec. bumped 3x just in case
  Spool Data = yes
  Spool Attributes = yes

}
#----------------------------------

Example fileset, /etc/bacula/FileSet/Studies2010-1.conf

#-------------------------------------------
FileSet {
  Name = "Studies2010-1"
  Include {
    Options {
      signature = MD5
      compression = GZIP5
      noatime = yes
      aclsupport = yes
      wilddir = "/export/studies/201007*"
      wilddir = "/export/studies/201008*"
    }
    Options {
      RegexDir = ".*"
      exclude = yes
    }
    File = "/export/studies"
  }
}
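
A quick way to verify that the wilddir/exclude logic selects only what you expect, without touching any tape, is the estimate command on the bconsole prompt:

estimate job=Studies2010-1 listing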

Example Schedule, /etc/bacula/Schedule/MonthlyCycle3.conf

Schedule {
  Name = MonthlyCycle3
  Run = Level=Full Pool=lto5-pool 3rd fri at 23:30
}

Tape library, storage definition:

Storage {
  Name = TapeLibrary
  Address = prod-tapelib.domain.com
  SDPort = 9103
  Password = "xxxxxx"
  Device = QuantumScalar-I40
  Media Type = LTO-5
  Autochanger = yes
  Maximum Concurrent Jobs = 4
}

Pool of tapes defined here:

Pool {
  Name = lto5-pool
  Pool Type = Backup
  Volume Retention = 6 months
  Recycle = yes
  AutoPrune = yes
  Label Format = LTO5
  Storage = TapeLibrary
}
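
With Label Format set and a barcode reader in the library, fresh tapes can be labelled in bulk from bconsole; the slot range below is just an example:

label barcodes pool=lto5-pool storage=TapeLibrary slots=1-25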

 

Relevant config files from the tape library server

 

Note that I spool data before saving to the tape – this prevents tape “shoe shine” during Incremental/Differential backups.

/etc/bacula/bacula-sd.conf
Storage { 
  Name = TapeLibrary
  WorkingDirectory = "/var/spool/bacula"
  Pid Directory = "/var/run"
}
Autochanger {
  Name = QuantumScalar-I40
  Device = Drive0
  Device = Drive1
  Changer Device = /dev/changer
  Changer Command = "/usr/libexec/bacula/mtx-changer %c %o %S %a %d"
}
Device {
  Name = Drive0
  Drive Index = 0
  Media Type = LTO-5
  Archive Device = /dev/nst0
  AutomaticMount = yes
  AlwaysOpen = yes
  RemovableMedia = yes
  RandomAccess = no
  AutoChanger = yes
  Alert Command = "sh -c 'smartctl -H -l error %c'"  
  Maximum Changer Wait = 600
  Maximum Rewind Wait = 600
  Maximum Open Wait = 600
  Spool Directory = /var/spool/bacula/Spool
  Maximum Spool Size = 45G
  Maximum Concurrent Jobs = 2
}
Device {
  Name = Drive1
  Drive Index = 1
  Media Type = LTO-5
  Archive Device = /dev/nst1
  AutomaticMount = yes
  AlwaysOpen = yes
  RemovableMedia = yes
  RandomAccess = no
  AutoChanger = yes
  Alert Command = "sh -c 'smartctl -H -l error %c'"
  Maximum Changer Wait = 600
  Maximum Rewind Wait = 600
  Maximum Open Wait = 600
  Spool Directory = /var/spool/bacula/Spool
  Maximum Spool Size = 45G
  Maximum Concurrent Jobs = 2
}
Messages {
  Name = Standard
  director = prod-backup-dir = all
}
Director {
  Name = prod-backup-dir
  Password = "xxxxxxxx"
}
Director {
  Name = prod-backup-mon
  Password = "xxxxxxxxxx"
  Monitor = yes
}
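
On the tape library server the same -t syntax check works for the storage daemon, and mtx can confirm that the changer and both drives are visible:

bacula-sd -t -c /etc/bacula/bacula-sd.conf
mtx -f /dev/changer status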

Thoughts

Implementing a Bacula-driven backup solution requires some time and effort, but what you get in the end is a sophisticated, enterprise-grade backup system, capable of backing up terabytes of data in an organised and efficient manner.

Used in conjunction with a monitoring system, it offers a fully automated backup solution with minimal operator effort required. Routine tasks boil down to: