From bbcb1d784b38d129a5fa41aa9f3dc7b798c25c9c Mon Sep 17 00:00:00 2001 From: Jared Camins-Esakov Date: Mon, 18 Jun 2012 17:16:31 -0400 Subject: [PATCH] Bug 8268: Add database dump to export tool This patch builds on work by Lars Wirzenius for the Koha packages. To date, the only way for a Koha librarian to obtain a complete backup of their system has been to log into the system via SSH (or FTP) to download the mysqldump file. This patch makes it possible for superlibrarians in properly configured systems to download night backups via the staff client's Export tool. Recognizing that this is functionality with potentially very grave security implications, system administrators must manually enable these features in the koha-conf.xml configuration file. The following configuration settings have been added to the koha-conf.xml file: * backupdir => directory where backups should be stored. * backup_db_via_tools => whether to allow superlibrarians to download database backups via the Export tool. The default is disabled, and there is no way -- by design -- to enable this option without manually editing koha-conf.xml. * backup_conf_via_tools => whether to allow superlibrarians to download configuration backups via the Export tool (this may be applicable to packages only). The default is disabled, and there is no way -- by design -- to enable this option without manually editing koha-conf.xml. This commit modifies the following scripts to make use of the new backupdir configuration option: * koha-dump and koha-run-backups in the Debian packages * The sample backup script misc/cronjobs/backup.sh Note that for security reasons, superlibrarians will not be allowed to download files that are not owned by the web server's effective user. This imposes a de facto dependency on ITK (for Apache) or running the web server as the Koha user (as is done with Plack). To test: 1. Apply patch. 2. Go to export page as a superlibrarian. 
Notice that no additional export options appear because they have not been enabled. 3. Add $KOHADEV/var/spool to the section of your koha-conf.xml (note that you will need to adjust that so that it is pointing at a logical directory). 4. Create the aforementioned directory. 5. Go to export page as a superlibrarian. Notice that no additional export options appear because they have not been enabled. 6. Add 1 to the section of your koha-conf.xml 7. Go to the export page as a superlibrarian. Notice the new tab. 8. Go to the export page as a non-superlibrarian. Notice there is no new tab. 9. Run: mysqldump -u koha -p koha | gzip > $BACKUPDIR/backup.sql.gz (substituting appropriate user, password, and database name) 10. Go to the export page as a superlibrarian, and look at the "Export database" tab. If you are running the web server as your Koha user, and ran the above command as your Koha user, you should now see the file listed as an option for download. 11. If you *did* see the file listed, change the ownership to something else: sudo chown root:root $BACKUPDIR/backup.sql.gz 11a. Confirm that you no longer see the file listed when you look at the "Export database" tab. 12. Change the ownership on the file to your web server (or Koha) user: sudo chown www-data:www-data backup.sql.gz 13. Go to the export page as a superlibrarian, and look at the "Export database" tab. You should now see backup.sql.gz listed. 14. Choose to download backup.sql.gz 15. Confirm that the downloaded file is what you were expecting. If you are interested, you can repeat the above steps but replace with , and instead of creating an sql file, create a tar file. To test packaging: run koha-dump, confirm that it still creates a usable backup. ------ This signoff contains two changes: 10-1. If no backup/conf files were present, then the message telling you so doesn't appear and the download button does. Made them behave correctly. 10-2. 
The test for a file existing required it to be owned by the webserver UID. This change makes it so it only has to be readable. Signed-off-by: Robin Sheat --- Makefile.PL | 9 ++ debian/scripts/koha-dump | 6 +- debian/scripts/koha-run-backups | 13 ++- debian/templates/koha-conf-site.xml.in | 6 ++ etc/koha-conf.xml | 6 ++ .../prog/en/modules/tools/export.tt | 58 ++++++++++++ misc/cronjobs/backup.sh | 18 ++-- tools/export.pl | 89 ++++++++++++++++++- 8 files changed, 185 insertions(+), 20 deletions(-) diff --git a/Makefile.PL b/Makefile.PL index 9ac147405e..46a35249bc 100644 --- a/Makefile.PL +++ b/Makefile.PL @@ -226,6 +226,10 @@ command-line, e.g., READMEs. Directory for Apache and Zebra logs produced by Koha. +=item BACKUP_DIR + +Directory for backup files produced by Koha. + =item PAZPAR2_CONF_DIR Directory for PazPar2 configuration files. @@ -293,6 +297,7 @@ my $target_map = { './services' => 'INTRANET_CGI_DIR', './skel' => 'NONE', './skel/var/log/koha' => { target => 'LOG_DIR', trimdir => -1 }, + './skel/var/spool/koha' => { target => 'BACKUP_DIR', trimdir => -1 }, './skel/var/run/koha/zebradb' => { target => 'ZEBRA_RUN_DIR', trimdir => -1 }, './skel/var/lock/koha/zebradb/authorities' => { target => 'ZEBRA_LOCK_DIR', trimdir => 6 }, './skel/var/lib/koha/zebradb/authorities/key' => { target => 'ZEBRA_DATA_DIR', trimdir => 6 }, @@ -548,6 +553,7 @@ my %test_suite_override_dirs = ( KOHA_CONF_DIR => ['etc'], ZEBRA_CONF_DIR => ['etc', 'zebradb'], LOG_DIR => ['var', 'log'], + BACKUP_DIR => ['var', 'spool'], SCRIPT_DIR => ['bin'], ZEBRA_LOCK_DIR => ['var', 'lock', 'zebradb'], ZEBRA_DATA_DIR => ['var', 'lib', 'zebradb'], @@ -1227,6 +1233,7 @@ sub get_target_directories { $dirmap{'DOC_DIR'} = File::Spec->catdir(@basedir, $package, 'doc'); $dirmap{'ZEBRA_LOCK_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'lock', 'zebradb'); $dirmap{'LOG_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'log'); + $dirmap{'BACKUP_DIR'} = File::Spec->catdir(@basedir, $package, 
'var', 'spool'); $dirmap{'ZEBRA_DATA_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'lib', 'zebradb'); $dirmap{'ZEBRA_RUN_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'run', 'zebradb'); } elsif ($mode eq 'dev') { @@ -1256,6 +1263,7 @@ sub get_target_directories { $dirmap{'DOC_DIR'} = File::Spec->catdir(@basedir, $package, 'doc'); $dirmap{'ZEBRA_LOCK_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'lock', 'zebradb'); $dirmap{'LOG_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'log'); + $dirmap{'BACKUP_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'spool'); $dirmap{'ZEBRA_DATA_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'lib', 'zebradb'); $dirmap{'ZEBRA_RUN_DIR'} = File::Spec->catdir(@basedir, $package, 'var', 'run', 'zebradb'); } else { @@ -1277,6 +1285,7 @@ sub get_target_directories { $dirmap{'DOC_DIR'} = File::Spec->catdir(@basedir, $package, 'doc'); $dirmap{'ZEBRA_LOCK_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'lock', $package, 'zebradb'); $dirmap{'LOG_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'log', $package); + $dirmap{'BACKUP_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'spool', $package); $dirmap{'ZEBRA_DATA_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'lib', $package, 'zebradb'); $dirmap{'ZEBRA_RUN_DIR'} = File::Spec->catdir(File::Spec->rootdir(), 'var', 'run', $package, 'zebradb'); } diff --git a/debian/scripts/koha-dump b/debian/scripts/koha-dump index 99c389415b..2fe9edddeb 100755 --- a/debian/scripts/koha-dump +++ b/debian/scripts/koha-dump @@ -44,7 +44,9 @@ mysqlhost="$( xmlstarlet sel -t -v 'yazgfs/config/hostname' $kohaconfig )" mysqldb="$( xmlstarlet sel -t -v 'yazgfs/config/database' $kohaconfig )" mysqluser="$( xmlstarlet sel -t -v 'yazgfs/config/user' $kohaconfig )" mysqlpass="$( xmlstarlet sel -t -v 'yazgfs/config/pass' $kohaconfig )" -dbdump="/var/spool/koha/$name/$name-$date.sql.gz" +backupdir="$( xmlstarlet sel -t -v 'yazgfs/config/backupdir' 
$kohaconfig )" +[ -z "$backupdir" ] && backupdir="/var/spool/koha/$name" +dbdump="$backupdir/$name-$date.sql.gz" echo "* DB to $dbdump" mysqldump --databases --host="$mysqlhost" \ --user="$mysqluser" --password="$mysqlpass" "$mysqldb" | @@ -54,7 +56,7 @@ chmod g+r "$dbdump" # Dump configs, logs, etc. -metadump="/var/spool/koha/$name/$name-$date.tar.gz" +metadump="$backupdir/$name-$date.tar.gz" echo "* configs, logs to $metadump" tar -C / -czf "$metadump" \ "etc/koha/sites/$name" \ diff --git a/debian/scripts/koha-run-backups b/debian/scripts/koha-run-backups index 7bf39c55f0..0aafc52309 100755 --- a/debian/scripts/koha-run-backups +++ b/debian/scripts/koha-run-backups @@ -17,7 +17,7 @@ # Daily cron job for koha. # - dump all sites, except one called 'demo' -dirname="/var/spool/koha" +dirname="" days="2" show_help() { @@ -58,10 +58,15 @@ done for name in $(koha-list --enabled | grep -Fxv demo) do koha-dump "$name" > /dev/null + if [ -z "$dirname" ]; then + backupdir="$( xmlstarlet sel -t -v 'yazgfs/config/backupdir' /etc/koha/sites/$name/koha-conf.xml )"; + else + backupdir="$dirname/$name"; + fi # Remove old dump files. # FIXME: This could probably be replaced by one line of perl. 
- ls "$dirname/$name/" | + ls "$backupdir/" | sed "s:^$name-\([0-9-]*\)\.\(sql\|tar\)\.gz$:\1:" | sort -u | tac | @@ -69,8 +74,8 @@ do tac | while read date do - tardump="$dirname/$name/$name-$date.tar.gz" - sqldump="$dirname/$name/$name-$date.sql.gz" + tardump="$backupdir/$name-$date.tar.gz" + sqldump="$backupdir/$name-$date.sql.gz" if [ -e "$tardump" ] && [ -e "$sqldump" ] then rm "$tardump" diff --git a/debian/templates/koha-conf-site.xml.in b/debian/templates/koha-conf-site.xml.in index a440c96d11..d8fbd7c1f9 100644 --- a/debian/templates/koha-conf-site.xml.in +++ b/debian/templates/koha-conf-site.xml.in @@ -263,6 +263,12 @@ /usr/share/koha/intranet/htdocs/intranet-tmpl /usr/share/koha/intranet/htdocs/intranet-tmpl/prog/en/includes/ /var/log/koha/__KOHASITE__ + /var/lib/koha/__KOHASITE__ + + 0 + 0 /usr/share/koha/misc/koha-install-log 0 diff --git a/etc/koha-conf.xml b/etc/koha-conf.xml index f31e31c903..bb79355a9a 100644 --- a/etc/koha-conf.xml +++ b/etc/koha-conf.xml @@ -282,6 +282,12 @@ __PAZPAR2_TOGGLE_XML_POST__ __INTRANET_TMPL_DIR__ __INTRANET_TMPL_DIR__/prog/en/includes/ __LOG_DIR__ + __BACKUP_DIR__ + + 0 + 0 http://__PAZPAR2_HOST__:__PAZPAR2_PORT__/search.pz2 __MISC_DIR__/koha-install-log 0 diff --git a/koha-tmpl/intranet-tmpl/prog/en/modules/tools/export.tt b/koha-tmpl/intranet-tmpl/prog/en/modules/tools/export.tt index 635e0f8b84..798ab83105 100644 --- a/koha-tmpl/intranet-tmpl/prog/en/modules/tools/export.tt +++ b/koha-tmpl/intranet-tmpl/prog/en/modules/tools/export.tt @@ -26,6 +26,12 @@ $(document).ready(function() {

@@ -185,6 +191,58 @@ $(document).ready(function() {

+[% IF ( allow_db_export ) %] +
+
+

Note : This export file will be very large, and is generated nightly.

+
+ Choose a file + [% IF ( dbfiles && (dbfiles.size > 0) ) %] +
    + [% FOREACH dbfile IN dbfiles %] +
  • [% dbfile %]
  • + [% END %] +
+ [% ELSE %] +

Unfortunately, no backups are available.

+ [% END %] +
+ + [% IF ( dbfiles && (dbfiles.size > 0) ) %] + + +
+ [% END %] +
+
+[% END %] + +[% IF ( allow_conf_export ) %] +
+
+

Note : This export file will be very large, and is generated nightly.

+
+ Choose a file + [% IF ( conffiles && (conffiles.size > 0) ) %] +
    + [% FOREACH conffile IN conffiles %] +
  • [% conffile %]
  • + [% END %] +
+ [% ELSE %] +

Unfortunately, no backups are available.

+ [% END %] +
+ + [% IF ( conffiles && (conffiles.size > 0) ) %] + + +
+ [% END %] +
+
+[% END %] + diff --git a/misc/cronjobs/backup.sh b/misc/cronjobs/backup.sh index 38026cba6d..0806c6ca3b 100755 --- a/misc/cronjobs/backup.sh +++ b/misc/cronjobs/backup.sh @@ -1,23 +1,19 @@ #!/bin/sh # Script to create daily backups of the Koha database. # Based on a script by John Pennington +BACKUPDIR=`xmlstarlet sel -t -v 'yazgfs/config/backupdir' $KOHA_CONF` KOHA_DATE=`date '+%y%m%d'` -KOHA_DUMP=/tmp/koha-$KOHA_DATE.dump -KOHA_BACKUP=/tmp/koha-$KOHA_DATE.dump.gz +KOHA_BACKUP=$BACKUPDIR/koha-$KOHA_DATE.sql.gz -mysqldump --single-transaction -u koha -ppassword koha > $KOHA_DUMP && -gzip -f $KOHA_DUMP && -# Creates the dump file and compresses it; -# -u is the Koha user, -p is the password for that user. -# The -f switch on gzip forces it to overwrite the file if one exists. +mysqldump --single-transaction -u koha -ppassword koha | gzip -9 > $KOHA_BACKUP -mv $KOHA_BACKUP /home/kohaadmin && -chown kohaadmin.users /home/kohaadmin/koha-$KOHA_DATE.dump.gz && -chmod 600 /home/kohaadmin/koha-$KOHA_DATE.dump.gz && +#mv $KOHA_BACKUP /home/kohaadmin && +#chown kohaadmin.users /home/kohaadmin/koha-$KOHA_DATE.dump.gz && +#chmod 600 /home/kohaadmin/koha-$KOHA_DATE.dump.gz && # Makes the compressed dump file property of the kohaadmin user. # Make sure that you replace kohaadmin with a real user. -echo "$KOHA_BACKUP was successfully created." | mail kohaadmin -s $KOHA_BACKUP || +[ -f $KOHA_BACKUP ] && echo "$KOHA_BACKUP was successfully created." | mail kohaadmin -s $KOHA_BACKUP || echo "$KOHA_BACKUP was NOT successfully created." 
| mail kohaadmin -s $KOHA_BACKUP # Notifies kohaadmin of (un)successful backup creation # EOF diff --git a/tools/export.pl b/tools/export.pl index 8c08602237..3355c2af7f 100755 --- a/tools/export.pl +++ b/tools/export.pl @@ -26,6 +26,7 @@ use C4::AuthoritiesMarc; # GetAuthority use CGI; use C4::Koha; # GetItemTypes use C4::Branch; # GetBranches +use Data::Dumper; my $query = new CGI; my $op=$query->param("op") || ''; @@ -33,7 +34,7 @@ my $filename=$query->param("filename"); my $dbh=C4::Context->dbh; my $marcflavour = C4::Context->preference("marcflavour"); -my ($template, $loggedinuser, $cookie) +my ($template, $loggedinuser, $cookie, $flags) = get_template_and_user ( { @@ -57,10 +58,23 @@ my ($template, $loggedinuser, $cookie) $branch = C4::Context->userenv->{'branch'}; } +my $backupdir = C4::Context->config('backupdir'); + if ($op eq "export") { + my $charset = 'utf-8'; + my $mimetype = 'application/octet-stream'; binmode STDOUT, ':encoding(UTF-8)'; - print $query->header( -type => 'application/octet-stream', - -charset => 'utf-8', + if ( $filename =~ m/\.gz$/ ) { + $mimetype = 'application/x-gzip'; + $charset = ''; + binmode STDOUT; + } elsif ( $filename =~ m/\.bz2$/ ) { + $mimetype = 'application/x-bzip2'; + binmode STDOUT; + $charset = ''; + } + print $query->header( -type => $mimetype, + -charset => $charset, -attachment=>$filename); my $record_type = $query->param("record_type"); @@ -159,6 +173,30 @@ if ($op eq "export") { push @sql_params, $authtype; } } + elsif ( $record_type eq 'db' ) { + my $successful_export; + if ( $flags->{superlibrarian} && C4::Context->config('backup_db_via_tools') ) { + $successful_export = download_backup( { directory => "$backupdir", extension => 'sql', filename => "$filename" } ) + } + unless ( $successful_export ) { + warn "A suspicious attempt was made to download the db at '$filename' by someone at " . $query->remote_host() . 
"\n"; + } + exit; + } + elsif ( $record_type eq 'conf' ) { + my $successful_export; + if ( $flags->{superlibrarian} && C4::Context->config('backup_conf_via_tools') ) { + $successful_export = download_backup( { directory => "$backupdir", extension => 'tar', filename => "$filename" } ) + } + unless ( $successful_export ) { + warn "A suspicious attempt was made to download the configuration at '$filename' by someone at " . $query->remote_host() . "\n"; + } + exit; + } + else { + # Someone is trying to mess us up + exit; + } my $sth = $dbh->prepare($sql_query); $sth->execute(@sql_params); @@ -259,6 +297,16 @@ else { push @authtypesloop, \%row; } + if ( $flags->{superlibrarian} && C4::Context->config('backup_db_via_tools') && $backupdir && -d $backupdir ) { + $template->{VARS}->{'allow_db_export'} = 1; + $template->{VARS}->{'dbfiles'} = getbackupfilelist( { directory => "$backupdir", extension => 'sql' } ); + } + + if ( $flags->{superlibrarian} && C4::Context->config('backup_conf_via_tools') && $backupdir && -d $backupdir ) { + $template->{VARS}->{'allow_conf_export'} = 1; + $template->{VARS}->{'conffiles'} = getbackupfilelist( { directory => "$backupdir", extension => 'tar' } ); + } + $template->param( branchloop => \@branchloop, itemtypeloop => \@itemtypesloop, @@ -268,3 +316,38 @@ else { output_html_with_http_headers $query, $cookie, $template->output; } + +sub getbackupfilelist { + my $args = shift; + my $directory = $args->{directory}; + my $extension = $args->{extension}; + my @files; + + if ( opendir(my $dir, $directory) ) { + while (my $file = readdir($dir)) { + next unless ( $file =~ m/\.$extension(\.(gz|bz2|xz))?/ ); + push @files, $file if ( -f "$backupdir/$file" && -r "$backupdir/$file" ); + } + closedir($dir); + } + return \@files; +} + +sub download_backup { + my $args = shift; + my $directory = $args->{directory}; + my $extension = $args->{extension}; + my $filename = $args->{filename}; + + return unless ( $directory && -d $directory ); + return unless ( 
$filename =~ m/$extension(\.(gz|bz2|xz))?$/ && not $filename =~ m#(^\.\.|/)# ); + $filename = "$directory/$filename"; + return unless ( -f $filename && -r $filename ); + return unless ( open(my $dump, '<', $filename) ); + binmode $dump; + while (read($dump, my $data, 64 * 1024)) { + print $data; + } + close ($dump); + return 1; +} -- 2.39.5