| author | Himanshu Shekhar <himanshushekharb16@gmail.com> | 2017-04-29 22:45:04 +0530 |
|---|---|---|
| committer | Himanshu Shekhar <himanshushekharb16@gmail.com> | 2017-04-29 22:45:04 +0530 |
| commit | 5128374c015a77bb6fe596ac29a99fdce2ae90a4 (patch) | |
| tree | 9dc78e6ed837611a564a695f728d22a2408746da /scripts | |
| parent | 3527c7eb42d543aa2427be23886647c954699608 (diff) | |
| download | psutil-5128374c015a77bb6fe596ac29a99fdce2ae90a4.tar.gz | |
fix linting for ci-tests
Diffstat (limited to 'scripts')
| Mode | File | Lines |
|---|---|---|
| -rwxr-xr-x | scripts/internal/check_broken_links.py | 12 |
1 file changed, 7 insertions, 5 deletions
diff --git a/scripts/internal/check_broken_links.py b/scripts/internal/check_broken_links.py
index 8a507203..ac62b05d 100755
--- a/scripts/internal/check_broken_links.py
+++ b/scripts/internal/check_broken_links.py
@@ -24,7 +24,7 @@ Method:
 REFERENCES:
 Using [1] with some modificatons for including ftp
-[1] http://stackoverflow.com/questions/6883049/regex-to-find-urls-in-string-in-python
+[1] http://stackoverflow.com/a/6883094/5163807
 [2] http://stackoverflow.com/a/31952097/5163807
 [3] http://daringfireball.net/2010/07/improved_regex_for_matching_urls
 [4] https://mathiasbynens.be/demo/url-regex
@@ -46,7 +46,8 @@ import requests
 
 
 HERE = os.path.abspath(os.path.dirname(__file__))
-URL_REGEX = '(?:http|ftp|https)?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'
+REGEX = r'(?:http|ftp|https)?://'
+REGEX += r'(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'
 
 
 def get_urls(filename):
@@ -60,7 +61,7 @@ def get_urls(filename):
     with open(fname) as f:
         text = f.read()
-    urls = re.findall(URL_REGEX, text)
+    urls = re.findall(REGEX, text)
     # remove duplicates, list for sets are not iterable
     urls = list(set(urls))
     # correct urls which are between < and/or >
@@ -80,7 +81,7 @@ def validate_url(url):
     try:
         res = requests.head(url)
         return res.ok
-    except Exception as e:
+    except requests.exceptions.RequestException:
         return False
 
 
@@ -97,7 +98,8 @@ def main():
             i += 1
             if not validate_url(url):
                 fails.append((url, fname))
-            sys.stdout.write("\r " + fname + " : " + str(i) + " / " + str(last))
+            sys.stdout.write("\r " +
+                             fname + " : " + str(i) + " / " + str(last))
             sys.stdout.flush()
     print()
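Taken together, the changes appear to be routine lint fixes: the URL regex constant is split across two concatenated raw strings to fit the line-length limit, the bare `except Exception as e` (with its unused variable) is narrowed to `requests.exceptions.RequestException`, and the long progress `sys.stdout.write` call is wrapped. The sketch below shows the same pattern in a self-contained form; the `check_file` driver and the CLI wrapper are illustrative only and not part of psutil's script, which loops over several files inside `main()`.

```python
# Minimal self-contained sketch of the pattern the patch ends up with.
# Assumption: check_file() and the __main__ block are illustrative only.
import re
import sys

import requests

# Long pattern split across two concatenated raw strings so each source
# line stays within flake8's default 79-character limit.
REGEX = r'(?:http|ftp|https)?://'
REGEX += r'(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'


def get_urls(text):
    # findall() returns every match; set() drops duplicates.
    return list(set(re.findall(REGEX, text)))


def validate_url(url):
    # HEAD avoids downloading the body; catching only RequestException
    # (rather than a bare Exception) lets unrelated bugs surface normally.
    try:
        return requests.head(url).ok
    except requests.exceptions.RequestException:
        return False


def check_file(fname):
    with open(fname) as f:
        urls = get_urls(f.read())
    fails = []
    for i, url in enumerate(urls, 1):
        if not validate_url(url):
            fails.append(url)
        # progress line wrapped over two source lines, as in the lint fix
        sys.stdout.write("\r " +
                         fname + " : " + str(i) + " / " + str(len(urls)))
        sys.stdout.flush()
    print()
    return fails


if __name__ == '__main__':
    for url in check_file(sys.argv[1]):
        print("broken:", url)
```

Building `REGEX` with `+=` keeps the pattern byte-for-byte identical to the original single-line constant while satisfying the length check; implicit string-literal concatenation inside parentheses would work equally well.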
