# Source path: tests/modules/process_test/try_execfile.py
# blob: 3068327e04ff3211ca973a3c153078b4299a8534
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt

"""Test file for run_python_file.

This file is executed two ways::

    $ coverage run try_execfile.py

and::

    $ python try_execfile.py

The output is compared to see that the program execution context is the same
under coverage and under Python.

It is not crucial that the execution be identical, there are some differences
that are OK.  This program canonicalizes the output to gloss over those
differences and get a clean diff.

"""

import itertools
import json
import os
import sys

# sys.path varies by execution environments.  Coverage.py uses setuptools to
# make console scripts, which means pkg_resources is imported.  pkg_resources
# removes duplicate entries from sys.path.  So we do that too, since the extra
# entries don't affect the running of the program.

def same_file(p1, p2):
    """Determine if `p1` and `p2` refer to the same existing file."""
    # An empty/None first path matches only an empty/None second path.
    if not p1:
        return not p2
    # Both paths must exist before any identity comparison is meaningful.
    if not (os.path.exists(p1) and os.path.exists(p2)):
        return False
    if hasattr(os.path, "samefile"):
        # Preferred: the platform can compare the files directly.
        return os.path.samefile(p1, p2)
    # Fallback for platforms without samefile: compare canonicalized names.
    def canon(path):
        return os.path.normcase(os.path.normpath(path))
    return canon(p1) == canon(p2)

def without_same_files(filenames):
    """Return the list `filenames` with duplicates (by `same_file`) removed."""
    kept = []
    for candidate in filenames:
        # Keep only the first occurrence: drop anything that `same_file`
        # says matches a path we already accepted.
        is_duplicate = any(same_file(candidate, seen) for seen in kept)
        if not is_duplicate:
            kept.append(candidate)
    return kept

# Duplicate-free, case-normalized view of sys.path, so both execution modes
# (plain python vs. coverage run) produce comparable path lists.
cleaned_sys_path = [os.path.normcase(p) for p in without_same_files(sys.path)]

# A module-level value whose visibility through __main__ is checked below.
DATA = "xyzzy"

# Imported after DATA is assigned: when this file runs as the main program,
# __main__ is this very module, so __main__.DATA is already defined.
import __main__

def my_function(a):
    """A function to force execution of module-level values."""
    return "my_fn({!r})".format(a)

FN_VAL = my_function("fooey")

# __loader__ may or may not be present, depending on how the file was run.
loader = globals().get('__loader__')
# Loaders have exposed the module name as either `fullname` or `name`; take
# whichever exists (both getattrs yield None when `loader` is None).
fullname = getattr(loader, 'fullname', None) or getattr(loader, 'name', None)

# Ad-hoc clumps of letters, used to collapse the builtins list into a small,
# stable set of groups keyed by first letter.
CLUMPS = "ABC,DEF,GHI,JKLMN,OPQR,ST,U,VWXYZ_,ab,cd,efg,hij,lmno,pqr,stuvwxyz".split(",")

def word_group(w):
    """Figure out which CLUMP the first letter of w is in."""
    # First clump containing the initial letter; 99 when no clump matches.
    return next(
        (idx for idx, letters in enumerate(CLUMPS) if w[0] in letters),
        99,
    )

# Canonicalize the builtins list: dir() returns a sorted list, so grouping
# consecutive names by first-letter clump yields a few stable space-joined
# strings that gloss over minor differences between execution modes.
builtin_dir = [" ".join(s) for _, s in itertools.groupby(dir(__builtins__), key=word_group)]

# The execution-context values whose equality across "python try_execfile.py"
# and "coverage run try_execfile.py" is being verified by the caller.
globals_to_check = {
    'os.getcwd': os.getcwd(),
    '__name__': __name__,
    '__file__': __file__,
    '__doc__': __doc__,
    # __builtins__ is a module when run as a script, so probe with hasattr
    # rather than assuming a dict.
    '__builtins__.has_open': hasattr(__builtins__, 'open'),
    '__builtins__.dir': builtin_dir,
    '__loader__ exists': loader is not None,
    '__loader__.fullname': fullname,
    '__package__': __package__,
    'DATA': DATA,
    'FN_VAL': FN_VAL,
    # "nothing" would indicate __main__ is not this module.
    '__main__.DATA': getattr(__main__, "DATA", "nothing"),
    'argv0': sys.argv[0],
    'argv1-n': sys.argv[1:],
    'path': cleaned_sys_path,
}

# Emit deterministically (sorted keys, fixed indent) so the two runs diff
# cleanly.
print(json.dumps(globals_to_check, indent=4, sort_keys=True))