

From: Jookia
Subject: [Bug-wget] [PATCH 2/2] Rewrite the --rejected-log test using the new framework.
Date: Fri, 7 Aug 2015 07:58:30 +1000

 * tests/Test--rejected-log.px: Remove old test.
 * testenv/Test--rejected-log.py: Create new test.
---
 testenv/Makefile.am           |   1 +
 testenv/Test--rejected-log.py | 104 +++++++++++++++++++++++++++++++
 tests/Makefile.am             |   1 -
 tests/Test--rejected-log.px   | 138 ------------------------------------------
 4 files changed, 105 insertions(+), 139 deletions(-)
 create mode 100755 testenv/Test--rejected-log.py
 delete mode 100755 tests/Test--rejected-log.px
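
For reviewers who want to exercise the new test on its own: the testenv
suite uses the standard Automake test harness, so something like the
following should work from a built tree with Python 3 available (treat
the exact invocation as a sketch, not a verified recipe):

    cd testenv && make check TESTS=Test--rejected-log.py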

diff --git a/testenv/Makefile.am b/testenv/Makefile.am
index 8f1b5f4..c16a6c6 100644
--- a/testenv/Makefile.am
+++ b/testenv/Makefile.am
@@ -63,6 +63,7 @@ if HAVE_PYTHON3
     Test-Post.py                                    \
     Test-504.py                                     \
     Test--spider-r.py                               \
+    Test--rejected-log.py                           \
     Test-redirect-crash.py                          \
     Test-reserved-chars.py                          \
     Test-condget.py                                 \
diff --git a/testenv/Test--rejected-log.py b/testenv/Test--rejected-log.py
new file mode 100755
index 0000000..ef72794
--- /dev/null
+++ b/testenv/Test--rejected-log.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python3
+from sys import exit
+from test.http_test import HTTPTest
+from misc.wget_file import WgetFile
+
+"""
+    This test executes Wget in recursive mode and verifies the --rejected-log output.
+"""
+TEST_NAME = "Rejected Log"
+############# File Definitions ###############################################
+mainpage = """
+<html>
+<head>
+  <title>Main Page</title>
+</head>
+<body>
+  <p>
+    Recurse to a <a href="http://127.0.0.1:{{port}}/secondpage.html">second page</a>.
+  </p>
+</body>
+</html>
+"""
+
+secondpage = """
+<html>
+<head>
+  <title>Second Page</title>
+</head>
+<body>
+  <p>
+    Recurse to a <a href="http://127.0.0.1:{{port}}/thirdpage.html">third page</a>.
+    Try the blacklisted <a href="http://127.0.0.1:{{port}}/index.html">main page</a>.
+  </p>
+</body>
+</html>
+"""
+
+thirdpage = """
+<html>
+<head>
+  <title>Third Page</title>
+</head>
+<body>
+  <p>
+    Try a hidden <a href="http://127.0.0.1:{{port}}/dummy.txt">dummy file</a>.
+    Try to leave to <a href="http://no.such.domain/">another domain</a>.
+  </p>
+</body>
+</html>
+"""
+
+robots = """
+User-agent: *
+Disallow: /dummy.txt
+"""
+
+log = """\
+REASON U_URL   U_SCHEME        U_HOST  U_PORT  U_PATH  U_PARAMS        U_QUERY U_FRAGMENT      P_URL   P_SCHEME        P_HOST  P_PORT  P_PATH  P_PARAMS        P_QUERY P_FRAGMENT
+BLACKLIST      http%3A//127.0.0.1%3A{{port}}/index.html        SCHEME_HTTP     127.0.0.1       {{port}}        index.html                              http%3A//127.0.0.1%3A{{port}}/secondpage.html   SCHEME_HTTP     127.0.0.1       {{port}}        secondpage.html
+ROBOTS http%3A//127.0.0.1%3A{{port}}/dummy.txt SCHEME_HTTP     127.0.0.1       {{port}}        dummy.txt                               http%3A//127.0.0.1%3A{{port}}/thirdpage.html    SCHEME_HTTP     127.0.0.1       {{port}}        thirdpage.html
+SPANNEDHOST    http%3A//no.such.domain/        SCHEME_HTTP     no.such.domain  80                                      http%3A//127.0.0.1%3A{{port}}/thirdpage.html    SCHEME_HTTP     127.0.0.1       {{port}}        thirdpage.html
+"""
+
+dummyfile = "Don't care."
+
+
+index_html = WgetFile ("index.html", mainpage)
+secondpage_html = WgetFile ("secondpage.html", secondpage)
+thirdpage_html = WgetFile ("thirdpage.html", thirdpage)
+robots_txt = WgetFile ("robots.txt", robots)
+dummy_txt = WgetFile ("dummy.txt", dummyfile)
+log_csv = WgetFile ("log.csv", log)
+
+WGET_OPTIONS = "-nd -r --rejected-log log.csv"
+WGET_URLS = [["index.html"]]
+
+Files = [[index_html, secondpage_html, thirdpage_html, robots_txt, dummy_txt]]
+
+ExpectedReturnCode = 0
+ExpectedDownloadedFiles = [index_html, secondpage_html, thirdpage_html, robots_txt, log_csv]
+# TODO: fix long line
+# TODO: check names
+
+################ Pre and Post Test Hooks #####################################
+pre_test = {
+    "ServerFiles"       : Files
+}
+test_options = {
+    "WgetCommands"      : WGET_OPTIONS,
+    "Urls"              : WGET_URLS
+}
+post_test = {
+    "ExpectedFiles"     : ExpectedDownloadedFiles,
+    "ExpectedRetcode"   : ExpectedReturnCode
+}
+
+err = HTTPTest (
+                name=TEST_NAME,
+                pre_hook=pre_test,
+                test_params=test_options,
+                post_hook=post_test
+).begin ()
+
+exit (err)
diff --git a/tests/Makefile.am b/tests/Makefile.am
index fae34d0..5d387aa 100644
--- a/tests/Makefile.am
+++ b/tests/Makefile.am
@@ -127,7 +127,6 @@ PX_TESTS = \
              Test--start-pos.px \
              Test--start-pos--continue.px \
              Test--httpsonly-r.px \
-             Test--rejected-log.px \
              Test-204.px
 
 EXTRA_DIST = FTPServer.pm FTPTest.pm HTTPServer.pm HTTPTest.pm \
diff --git a/tests/Test--rejected-log.px b/tests/Test--rejected-log.px
deleted file mode 100755
index 588d9c6..0000000
--- a/tests/Test--rejected-log.px
+++ /dev/null
@@ -1,138 +0,0 @@
-#!/usr/bin/env perl
-
-use strict;
-use warnings;
-
-use HTTPTest;
-
-
-###############################################################################
-
-my $mainpage = <<EOF;
-<html>
-<head>
-  <title>Main Page</title>
-</head>
-<body>
-  <p>
-    Recurse to a <a href="http://localhost:{{port}}/secondpage.html">second page</a>.
-  </p>
-</body>
-</html>
-EOF
-
-my $secondpage = <<EOF;
-<html>
-<head>
-  <title>Second Page</title>
-</head>
-<body>
-  <p>
-    Recurse to a <a href="http://localhost:{{port}}/thirdpage.html">third page</a>.
-    Try the blacklisted <a href="http://localhost:{{port}}/index.html">main page</a>.
-  </p>
-</body>
-</html>
-EOF
-
-my $thirdpage = <<EOF;
-<html>
-<head>
-  <title>Third Page</title>
-</head>
-<body>
-  <p>
-    Try a hidden <a href="http://localhost:{{port}}/dummy.txt">dummy file</a>.
-    Try to leave to <a href="http://no.such.domain/">another domain</a>.
-  </p>
-</body>
-</html>
-EOF
-
-my $robots = <<EOF;
-User-agent: *
-Disallow: /dummy.txt
-EOF
-
-my $log = <<EOF;
-REASON U_URL   U_SCHEME        U_HOST  U_PORT  U_PATH  U_PARAMS        U_QUERY U_FRAGMENT      P_URL   P_SCHEME        P_HOST  P_PORT  P_PATH  P_PARAMS        P_QUERY P_FRAGMENT
-BLACKLIST      http%3A//localhost%3A{{port}}/index.html        SCHEME_HTTP     localhost       {{port}}        index.html                              http%3A//localhost%3A{{port}}/secondpage.html   SCHEME_HTTP     localhost       {{port}}        secondpage.html
-ROBOTS http%3A//localhost%3A{{port}}/dummy.txt SCHEME_HTTP     localhost       {{port}}        dummy.txt                               http%3A//localhost%3A{{port}}/thirdpage.html    SCHEME_HTTP     localhost       {{port}}        thirdpage.html
-SPANNEDHOST    http%3A//no.such.domain/        SCHEME_HTTP     no.such.domain  80                                      http%3A//localhost%3A{{port}}/thirdpage.html    SCHEME_HTTP     localhost       {{port}}        thirdpage.html
-EOF
-
-# code, msg, headers, content
-my %urls = (
-    '/index.html' => {
-        code => "200",
-        msg => "Dontcare",
-        headers => {
-            "Content-type" => "text/html",
-        },
-        content => $mainpage,
-    },
-    '/secondpage.html' => {
-        code => "200",
-        msg => "Dontcare",
-        headers => {
-            "Content-type" => "text/html",
-        },
-        content => $secondpage,
-    },
-    '/thirdpage.html' => {
-        code => "200",
-        msg => "Dontcare",
-        headers => {
-            "Content-type" => "text/html",
-        },
-        content => $thirdpage,
-    },
-    '/dummy.txt' => {
-        code => "200",
-        msg => "Dontcare",
-        headers => {
-            "Content-type" => "text/plain",
-        },
-        content => "",
-    },
-    '/robots.txt' => {
-        code => "200",
-        msg => "Dontcare",
-        headers => {
-            "Content-type" => "text/plain",
-        },
-        content => $robots
-    },
-);
-
-my $cmdline = $WgetTest::WGETPATH . " -nd -r --rejected-log log.csv http://localhost:{{port}}/index.html";
-
-my $expected_error_code = 0;
-
-my %expected_downloaded_files = (
-  "index.html" => {
-    content => $mainpage,
-  },
-  "secondpage.html" => {
-    content => $secondpage,
-  },
-  "thirdpage.html" => {
-    content => $thirdpage,
-  },
-  "robots.txt" => {
-    content => $robots,
-  },
-  "log.csv" => {
-    content => $log,
-  },
-);
-
-###############################################################################
-
-my $the_test = HTTPTest->new (input => \%urls,
-                              cmdline => $cmdline,
-                              errcode => $expected_error_code,
-                              output => \%expected_downloaded_files);
-exit $the_test->run();
-
-# vim: et ts=4 sw=4
-- 
2.5.0
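
A side note on the expected log contents above: the URL fields appear
percent-encoded, with ':' written as %3A. As a point of reference only
(this snippet is an illustration, not code from the patch or from Wget
itself), Python's default quoting produces the same shape:

    from urllib.parse import quote

    # With the default safe="/", ':' is escaped but '/' is kept,
    # matching the http%3A//127.0.0.1%3A{{port}}/... strings in log.csv.
    print(quote("http://127.0.0.1:8080/index.html"))
    # -> http%3A//127.0.0.1%3A8080/index.html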



