about summary refs log tree commit diff
path: root/nixpkgs/pkgs/tools/backup/store-backup/default.nix
blob: afa667ec0725100551df62e7a2fd25f0a20a9384 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
{ lib, stdenv, which, coreutils, perl, fetchurl, makeWrapper, diffutils, writeScriptBin, bzip2 }:

# quick usage:
# storeBackup.pl --sourceDir /home/user --backupDir /tmp/my_backup_destination
# It is slow the first time because it compresses all files bigger than 1k (default setting).
# The backup tool keeps track of which files got compressed.

# btrfs warning: you may run out of hardlinks soon

# known impurity: test cases seem to be using /tmp/storeBackup.lock ..

let
  # Stub `mount` binary: the sanity tests below invoke `mount`, which is not
  # available (and not permitted) inside the build sandbox.
  dummyMount = writeScriptBin "mount" "#!${stdenv.shell}";
in

stdenv.mkDerivation rec {

  version = "3.5";

  pname = "store-backup";

  enableParallelBuilding = true;

  nativeBuildInputs = [ makeWrapper ];
  buildInputs = [ perl ];

  src = fetchurl {
    url = "https://download.savannah.gnu.org/releases/storebackup/storeBackup-${version}.tar.bz2";
    sha256 = "0y4gzssc93x6y93mjsxm5b5cdh68d7ffa43jf6np7s7c99xxxz78";
  };

  installPhase = ''
    mkdir -p $out/scripts
    mv * $out
    mv $out/_ATTENTION_ $out/doc
    mv $out/{correct.sh,cron-storebackup} $out/scripts

    # Replace hard-coded /bin paths with store paths, and point the shebang at
    # a perl that has DB_File available (needed by the bookkeeping code).
    find $out -name "*.pl" | xargs sed -i \
      -e 's@/bin/pwd@${coreutils}/bin/pwd@' \
      -e 's@/bin/sync@${coreutils}/bin/sync@' \
      -e '1 s@/usr/bin/env perl@${perl.withPackages (p: [ p.DBFile ])}/bin/perl@'

    # The scripts shell out to `which` and `bzip2` at runtime.
    for p in $out/bin/*
      do wrapProgram "$p" --prefix PATH ":" "${lib.makeBinPath [ which bzip2 ]}"
    done

    patchShebangs $out
    # do a dummy test ensuring this works

    PATH=$PATH:${dummyMount}/bin


    { # simple sanity test, test backup/restore of simple store paths

      mkdir backup

      backupRestore(){
        source="$2"
        echo =========
        echo RUNNING TEST "$1" source: "$source"
        mkdir restored

        $out/bin/storeBackup.pl --sourceDir "$source" --backupDir backup
        latestBackup=backup/default/$(ls -1 backup/default | sort | tail -n 1)
        $out/bin/storeBackupRecover.pl -b "$latestBackup" -t restored -r /
        ${diffutils}/bin/diff -r "$source" restored

        # storeBackupCheckSource should return 0
        $out/bin/storeBackupCheckSource.pl -s "$source" -b "$latestBackup"
        # storeBackupCheckSource should return not 0 when using different source
        ! $out/bin/storeBackupCheckSource.pl -s $TMP -b "$latestBackup"

        # storeBackupCheckBackup should return 0
        $out/bin/storeBackupCheckBackup.pl -c "$latestBackup"

        chmod -R +w restored
        rm -fr restored
      }

      testDir=$TMP/testDir

      mkdir $testDir
      echo X > $testDir/X
      ln -s ./X $testDir/Y

      backupRestore 'test 1: backup, restore' $testDir

      # test huge blocks, according to docs files bigger than 100MB get split
      # into pieces
      dd if=/dev/urandom bs=100M of=block-1 count=1
      dd if=/dev/urandom bs=100M of=block-2 count=1
      cat block-1 block-2 > $testDir/block
      backupRestore 'test 1 with huge block' $testDir

      cat block-2 block-1 > $testDir/block
      backupRestore 'test 1 with huge block reversed' $testDir

      backupRestore 'test 2: backup, restore' $out
      backupRestore 'test 3: backup, restore' $out
      backupRestore 'test 4: backup diffutils to same backup locations, restore' ${diffutils}
    }
  '';

  meta = {
    # nixpkgs convention: description should not begin with an article
    description = "Backup suite that stores files on other disks";
    homepage = "https://savannah.nongnu.org/projects/storebackup";
    license = lib.licenses.gpl3Plus;
    maintainers = [ lib.maintainers.marcweber ];
    platforms = lib.platforms.linux;
  };
}