diff options
Diffstat (limited to 'nixpkgs/pkgs/tools/networking/swec/default.nix')
-rw-r--r-- | nixpkgs/pkgs/tools/networking/swec/default.nix | 74 |
1 file changed, 0 insertions, 74 deletions
diff --git a/nixpkgs/pkgs/tools/networking/swec/default.nix b/nixpkgs/pkgs/tools/networking/swec/default.nix deleted file mode 100644 index 6751b1cf5eb8..000000000000 --- a/nixpkgs/pkgs/tools/networking/swec/default.nix +++ /dev/null @@ -1,74 +0,0 @@ -{ fetchurl, lib, stdenv, makeWrapper, perlPackages }: - -stdenv.mkDerivation rec { - pname = "swec"; - version = "0.4"; - - src = fetchurl { - url = "http://files.zerodogg.org/swec/swec-${version}.tar.bz2"; - sha256 = "1m3971z4z1wr0paggprfz0n8ng8vsnkc9m6s3bdplgyz7qjk6jwx"; - }; - - nativeBuildInputs = [ makeWrapper ]; - buildInputs = [ perlPackages.perl perlPackages.LWP perlPackages.URI perlPackages.HTMLParser ]; - checkInputs = [ perlPackages.HTTPServerSimple perlPackages.Parent ]; - - configurePhase = '' - for i in swec tests/{runTests,testServer} - do - sed -i "$i" -e's|/usr/bin/perl|${perlPackages.perl}/bin/perl|g' - done - ''; - - dontBuild = true; - - installPhase = '' - make install prefix="$out" - - mkdir -p "$out/share/swec-${version}" - cp -v default.sdf "$out/share/swec-${version}" - sed -i "$out/bin/swec" -e"s|realpath(\$0)|'$out/share/swec-${version}/swec'|g" - - wrapProgram "$out/bin/swec" \ - --prefix PERL5LIB : ${with perlPackages; makePerlPath [ LWP URI HTMLParser ]} - ''; - - doCheck = true; - checkPhase = "make test"; - - meta = { - homepage = "https://random.zerodogg.org/swec/"; - - description = "Simple Web Error Checker (SWEC)"; - - longDescription = - '' SWEC (Simple Web Error Checker) is a program that automates testing - of dynamic websites. It parses each HTML file it finds for links, - and if those links are within the site specified (ie. local, not - external), it will check that page as well. In this respect it - works a lot like a crawler, in that it'll click on any link it finds - (more notes about this later). 
- - In addition to parsing and locating links, it will also parse the - pages looking for known errors and report those (such as Mason or - PHP errors), and will report if a page can not be read (by either - returning a 404, 500 or similar). - - Since you may often want SWEC to be logged in on your site, you have - to be careful. When logged in, SWEC will still click on all links - it finds, including things like 'join group' or 'delete account' - (though it has some magic trying to avoid the latter). Therefore it - is highly recommended that when you run SWEC as a logged-in user on - a site, use a test server, not the live one. - - Running SWEC on a live site without being logged in as a user is - perfectly fine, it won't do anything a normal crawler wouldn't do - (well, not exactly true, SWEC will ignore robots.txt). - ''; - - license = lib.licenses.gpl3Plus; - - maintainers = [ ]; - platforms = lib.platforms.linux; - }; -} |