diff options
Diffstat (limited to 'yarns.webapp/900-implementations.yarn')
-rw-r--r-- | yarns.webapp/900-implementations.yarn | 442 |
1 files changed, 442 insertions, 0 deletions
diff --git a/yarns.webapp/900-implementations.yarn b/yarns.webapp/900-implementations.yarn
new file mode 100644
index 0000000..54a82a4
--- /dev/null
+++ b/yarns.webapp/900-implementations.yarn
@@ -0,0 +1,442 @@
+Implementations
+===============
+
+This chapter includes IMPLEMENTS sections for the various steps used
+in scenarios.
+
+Managing a WEBAPP instance
+--------------------------
+
+We're testing a web application (conveniently named WEBAPP, though
+the executable is `lorry-controller-webapp`), so we need to be able to
+start it and stop it in scenarios. We start it as a background
+process, and keep its PID in `$DATADIR/webapp.pid`. When it's time to
+kill it, we kill the process with the PID in that file. This is not
+perfect, though it's good enough for our purposes. It doesn't handle
+running multiple instances at the same time, which we don't need, and
+doesn't handle the case of the process dying and the kernel re-using
+the PID for something else, which is quite unlikely.
+
+Start an instance of the WEBAPP, using a random port. Record the PID
+and the port. Listen only on localhost. We use `start-stop-daemon` to
+start the process, so that it can keep running in the background,
+but the shell doesn't wait for it to terminate. This way, WEBAPP will
+be running until it crashes or is explicitly killed.
+
+    IMPLEMENTS GIVEN a running WEBAPP
+    start_webapp
+
+    IMPLEMENTS WHEN WEBAPP is started
+    start_webapp
+
+Kill the running WEBAPP, using the recorded PID. We need to do this
+both as a WHEN and a FINALLY step.
+
+    IMPLEMENTS WHEN WEBAPP is terminated
+    kill_daemon_using_pid_file "$DATADIR/webapp.pid"
+
+    IMPLEMENTS FINALLY WEBAPP terminates
+    kill_daemon_using_pid_file "$DATADIR/webapp.pid"
+
+Also test that WEBAPP isn't running.
+
+    IMPLEMENTS THEN WEBAPP isn't running
+    pid=$(head -n1 "$DATADIR/webapp.pid")
+    if kill -0 "$pid"
+    then
+        echo "process $pid is still running, but shouldn't be" 1>&2
+        exit 1
+    fi
+
+Managing Lorry Controller configuration
+---------------------------------------
+
+We need to be able to create, and change, the `lorry-controller.conf`
+file, and other files, in CONFGIT. First of all, we need to create
+CONFGIT.
+
+    IMPLEMENTS GIVEN a new git repository in (\S+)
+    git init "$DATADIR/$MATCH_1"
+
+Then we need to create an empty `lorry-controller.conf` file there.
+This is not just an empty file, it must be a JSON file that contains
+an empty list object.
+
+    IMPLEMENTS GIVEN an empty lorry-controller.conf in (\S+)
+    printf '[]\n' > "$DATADIR/$MATCH_1/lorry-controller.conf"
+
+Set the contents of `lorry-controller.conf` from a textual form.
+
+    IMPLEMENTS GIVEN a lorry-controller.conf in (\S+) containing "(.*)"$
+    printf '%s\n' "$MATCH_2" > "$DATADIR/$MATCH_1/lorry-controller.conf"
+
+Add a `.lorry` file to be used by a `lorry-controller.conf`.
+
+    IMPLEMENTS GIVEN Lorry file (\S+) with (.*)
+    printf '%s\n' "$MATCH_2" > "$DATADIR/$MATCH_1"
+
+Remove a file. This is actually quite generic, but it's relevant to us
+for `.lorry` files only (when this is being written).
+
+    IMPLEMENTS GIVEN file (\S+) is removed
+    rm "$DATADIR/$MATCH_1"
+
+Add a `lorries` section to a `lorry-controller.conf`. This hardcodes
+most of the configuration.
+
+    IMPLEMENTS GIVEN (\S+) in (\S+) adds lorries (\S+) using prefix (\S+)
+    python -c '
+    import os
+    import json
+
+    DATADIR = os.environ["DATADIR"]
+    MATCH_1 = os.environ["MATCH_1"]
+    MATCH_2 = os.environ["MATCH_2"]
+    MATCH_3 = os.environ["MATCH_3"]
+    MATCH_4 = os.environ["MATCH_4"]
+
+    new = {
+        "type": "lorries",
+        "interval": "0s",
+        "prefix": MATCH_4,
+        "globs": [
+            MATCH_3,
+        ],
+    }
+
+    filename = os.path.join(DATADIR, MATCH_2, MATCH_1)
+    with open(filename, "r") as f:
+        obj = json.load(f)
+    obj.append(new)
+    with open(filename, "w") as f:
+        json.dump(obj, f)
+    '
+
+Add a `troves` section to `lorry-controller.conf`. Again, we hardcode
+most of the configuration.
+
+    IMPLEMENTS GIVEN (\S+) in (\S+) adds trove (\S+)
+    python -c '
+    import os
+    import json
+
+    DATADIR = os.environ["DATADIR"]
+    MATCH_1 = os.environ["MATCH_1"]
+    MATCH_2 = os.environ["MATCH_2"]
+    MATCH_3 = os.environ["MATCH_3"]
+
+    new = {
+        "type": "troves",
+        "trovehost": MATCH_3,
+        "protocol": "ssh",
+        "interval": "0s",
+        "ls-interval": "0s",
+        "prefixmap": {},
+        "ignore": [],
+    }
+
+    filename = os.path.join(DATADIR, MATCH_2, MATCH_1)
+    with open(filename, "r") as f:
+        obj = json.load(f)
+    obj.append(new)
+    with open(filename, "w") as f:
+        json.dump(obj, f, indent=4)
+    '
+
+Set a specific field for all sections in a `lorry-controller.conf`
+file.
+
+    IMPLEMENTS GIVEN (\S+) in (\S+) has (\S+) set to (.+) for everything
+    python -c '
+    import os
+    import json
+
+    DATADIR = os.environ["DATADIR"]
+    MATCH_1 = os.environ["MATCH_1"]
+    MATCH_2 = os.environ["MATCH_2"]
+    MATCH_3 = os.environ["MATCH_3"]
+    MATCH_4 = os.environ["MATCH_4"]
+
+    filename = os.path.join(DATADIR, MATCH_2, MATCH_1)
+
+    with open(filename, "r") as f:
+        obj = json.load(f)
+
+    for section in obj:
+        section[MATCH_3] = json.loads(MATCH_4)
+
+    with open(filename, "w") as f:
+        json.dump(obj, f, indent=4)
+    '
+
+Set a specific field for a `troves` section.
+
+    IMPLEMENTS GIVEN (\S+) in (\S+) sets (\S+) to (\S+) for trove (\S+)
+    python -c '
+    import os
+    import json
+
+    # Each match must come from its own environment variable; MATCH_4
+    # and MATCH_5 previously (wrongly) re-read MATCH_3, so the field
+    # name was used as both the value and the trove host.
+    DATADIR = os.environ["DATADIR"]
+    MATCH_1 = os.environ["MATCH_1"]
+    MATCH_2 = os.environ["MATCH_2"]
+    MATCH_3 = os.environ["MATCH_3"]
+    MATCH_4 = os.environ["MATCH_4"]
+    MATCH_5 = os.environ["MATCH_5"]
+
+    filename = os.path.join(DATADIR, MATCH_2, MATCH_1)
+
+    with open(filename, "r") as f:
+        obj = json.load(f)
+
+    for section in obj:
+        if section["type"] in ["trove", "troves"]:
+            if section["trovehost"] == MATCH_5:
+                section[MATCH_3] = json.loads(MATCH_4)
+
+    with open(filename, "w") as f:
+        json.dump(obj, f, indent=4)
+    '
+
+Set the prefixmap for a Trove in a Lorry Controller configuration
+file. Note that the Trove must already be in the configuration file.
+
+    IMPLEMENTS GIVEN (\S+) in (\S+) has prefixmap (\S+):(\S+) for (\S+)
+    python -c '
+    import os
+    import json
+
+    DATADIR = os.environ["DATADIR"]
+    MATCH_1 = os.environ["MATCH_1"]
+    MATCH_2 = os.environ["MATCH_2"]
+    MATCH_3 = os.environ["MATCH_3"]
+    MATCH_4 = os.environ["MATCH_4"]
+    MATCH_5 = os.environ["MATCH_5"]
+
+    filename = os.path.join(DATADIR, MATCH_2, MATCH_1)
+    with open(filename, "r") as f:
+        objs = json.load(f)
+
+    for obj in objs:
+        if obj["type"] == "troves" and obj["trovehost"] == MATCH_5:
+            obj["prefixmap"][MATCH_3] = MATCH_4
+
+    with open(filename, "w") as f:
+        json.dump(objs, f, indent=4)
+    '
+
+We need to be able to tell WEBAPP, when it runs, where the
+configuration directory is.
+
+    IMPLEMENTS GIVEN WEBAPP uses (\S+) as its configuration directory
+    add_to_config_file "$DATADIR/webapp.conf" \
+        configuration-directory "$DATADIR/$MATCH_1"
+
+Make WEBAPP fake access to a Trove using a static file.
+
+    IMPLEMENTS GIVEN WEBAPP fakes Trove (\S+)
+    add_to_config_file "$DATADIR/webapp.conf" \
+        debug-fake-trove "$MATCH_1=$DATADIR/$MATCH_1.trove"
+
+Control the ls listing of a remote Trove.
+
+    IMPLEMENTS GIVEN remote Trove (\S+) has repository (\S+)
+    filename="$DATADIR/$MATCH_1.trove"
+    if [ ! -e "$filename" ]
+    then
+        echo "{}" > "$filename"
+    fi
+    cat "$filename"
+    python -c '
+    import json, os, sys
+    MATCH_2 = os.environ["MATCH_2"]
+    filename = sys.argv[1]
+    with open(filename) as f:
+        data = json.load(f)
+    data["ls-output"] = data.get("ls-output", []) + [MATCH_2]
+    with open(filename, "w") as f:
+        json.dump(data, f)
+    ' "$filename"
+
+Remove a repository from the fake remote Trove.
+
+    IMPLEMENTS GIVEN remote Trove (\S+) doesn't have repository (\S+)
+    filename="$DATADIR/$MATCH_1.trove"
+    if [ ! -e "$filename" ]
+    then
+        echo "{}" > "$filename"
+    fi
+    cat "$filename"
+    python -c '
+    import json, os, sys
+    MATCH_2 = os.environ["MATCH_2"]
+    filename = sys.argv[1]
+    with open(filename) as f:
+        data = json.load(f)
+    paths = data.get("ls-output", [])
+    if MATCH_2 in paths:
+        paths.remove(MATCH_2)
+    data["ls-output"] = paths
+    with open(filename, "w") as f:
+        json.dump(data, f)
+    ' "$filename"
+
+Making and analysing HTTP requests
+----------------------------------
+
+Simple HTTP GET and POST requests are simple. We make the request,
+sending a body if given, and capture the response: HTTP status code,
+response headers, response body.
+
+We make the request using the `curl` command line program, which makes
+capturing the response quite convenient.
+
+HTTP requests can be made by various entities. This does not affect
+test code, but allows for nicer scenario steps.
+
+We check that the HTTP status indicates success, so that every
+scenario doesn't need to check that separately.
+
+A GET request:
+
+    IMPLEMENTS WHEN admin makes request GET (\S+)
+    > "$DATADIR/response.headers"
+    > "$DATADIR/response.body"
+    port=$(cat "$DATADIR/webapp.port")
+
+    # The timestamp is needed by "THEN static status page got updated"
+    touch "$DATADIR/request.timestamp"
+
+    curl \
+        -D "$DATADIR/response.headers" \
+        -o "$DATADIR/response.body" \
+        --silent --show-error \
+        "http://127.0.0.1:$port$MATCH_1"
+    cat "$DATADIR/response.headers"
+    cat "$DATADIR/response.body"
+    head -n1 "$DATADIR/response.headers" | grep '^HTTP/1\.[01] 200 '
+
+A POST request always has a body. The body consists of `foo=bar`
+pairs, separated by `&` signs.
+
+    IMPLEMENTS WHEN (\S+) makes request POST (\S+) with (.*)
+    post_request "$MATCH_2" "$MATCH_3"
+
+Except, sometimes we don't have a useful body to give. So we don't.
+
+    IMPLEMENTS WHEN (\S+) makes request POST (\S+)
+    post_request "$MATCH_2" dummy=value
+
+Check the Content-Type of the response has the desired type.
+
+    IMPLEMENTS THEN response is (\S+)
+    cat "$DATADIR/response.headers"
+    grep -i "^Content-Type: $MATCH_1" "$DATADIR/response.headers"
+
+A JSON response can then be queried further. The JSON is expected to
+be a dict, so that values are accessed by name from the dict. The
+value is expressed as a JSON value in the step.
+
+    IMPLEMENTS THEN response has (\S+) set to (.+)
+    cat "$DATADIR/response.body"
+    python -c '
+    import json, os, sys
+    data = json.load(sys.stdin)
+    key = os.environ["MATCH_1"]
+    expected = json.loads(os.environ["MATCH_2"])
+    value = data[key]
+    if value != expected:
+        sys.stderr.write(
+            "Key {key} has value {value}, but "
+            "{expected} was expected".format(
+                key=key, value=value, expected=expected))
+        sys.exit(1)
+    ' < "$DATADIR/response.body"
+
+A JSON response may need to be analysed in more depth. Specifically,
+we may need to look at a list of dicts, as below.
+
+    IMPLEMENTS THEN response has (\S+) item (\d+) field (\S+) set to (\S+)
+    cat "$DATADIR/response.body"
+    python -c '
+    import json, os, sys
+    data = json.load(sys.stdin)
+    print "data:", repr(data)
+    items = os.environ["MATCH_1"]
+    print "items:", repr(items)
+    item = int(os.environ["MATCH_2"])
+    print "item:", repr(item)
+    field = os.environ["MATCH_3"]
+    print "field:", repr(field)
+    print "match4:", repr(os.environ["MATCH_4"])
+    expected = json.loads(os.environ["MATCH_4"])
+    print "expected:", repr(expected)
+    print "data[items]:", repr(data[items])
+    print "data[items][item]:", repr(data[items][item])
+    print "data[items][item][field]:", repr(data[items][item][field])
+    value = data[items][item][field]
+    if value != expected:
+        sys.stderr.write(
+            "Item {item} in {items} has field {field} with "
+            "value {value}, but {expected} was expected".format (
+                item=item, items=items, field=field, value=value,
+                expected=expected))
+        sys.exit(1)
+    ' < "$DATADIR/response.body"
+
+In some cases, such as free disk space, we don't care about the actual
+value, but we do care that it is there.
+
+    IMPLEMENTS THEN response has (\S+) set
+    cat "$DATADIR/response.body"
+    python -c '
+    import json, os, sys
+    data = json.load(sys.stdin)
+    key = os.environ["MATCH_1"]
+    if key not in data:
+        sys.stderr.write(
+            "Key {key} is not set, but was expected to be set".format (
+                key=key))
+        sys.exit(1)
+    ' < "$DATADIR/response.body"
+
+Some responses are just plain text, so we match them with a regexp.
+
+    IMPLEMENTS THEN response matches "(.*)"$
+    cat "$DATADIR/response.body"
+    grep "$MATCH_1" "$DATADIR/response.body"
+
+
+Status web page
+---------------
+
+WEBAPP is expected to update a static HTML page whenever the
+`/1.0/status` request is made. We configure WEBAPP to write it to
+`$DATADIR/lc-status.html`. We don't test the contents of the page, but
+we do test that it gets updated.
We test for the updates by comparing
+the modification time of the file with the time of the request. We
+know the time of the request thanks to the "WHEN admin makes a
+request" step updating the modification time of a file for this
+purpose.
+
+    IMPLEMENTS THEN static status page got updated
+    # test -nt isn't useful: the timestamps might be identical, and
+    # that's OK on filesystems that only store full-second timestamps.
+    # We generate timestamps in (roughly) ISO 8601 format, with stat,
+    # and those can be compared using simple string comparison.
+
+    status=$(stat -c %y "$DATADIR/lc-status.html")
+    request=$(stat -c %y "$DATADIR/request.timestamp")
+    test "$request" = "$status" || test "$request" '<' "$status"
+
+
+STATEDB
+-------
+
+Check that the STATEDB is empty. This means it should exist, and
+should be initialised, but none of the important tables should have
+any rows in them.
+
+    IMPLEMENTS THEN STATEDB is empty
+    test -s "$DATADIR/webapp.db"
+    sqlite3 "$DATADIR/webapp.db" 'SELECT * FROM troves;' | stdin_is_empty
+    sqlite3 "$DATADIR/webapp.db" 'SELECT * FROM lorries;' | stdin_is_empty