black: Reformat without skipping string normalization.

parent fba21bb00d
commit 6f3f9bf7e4

178 changed files with 5242 additions and 5242 deletions
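For context: "string normalization" is the black pass that rewrites string literals to use double quotes, and it is only turned off when black is invoked with -S / --skip-string-normalization. The exact command or configuration change is not part of this diff, so the following is only a rough sketch of an equivalent run; the target paths are an assumption, not taken from this commit:

    # assumption: illustrative paths, not the actual invocation behind this commit
    black zulip zulip_bots zulip_botserver tools    # default run: 'text' becomes "text"
    # earlier formatting runs presumably passed -S / --skip-string-normalization,
    # which leaves the existing quote style untouched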
|
@@ -8,96 +8,96 @@ if MYPY:
|
|||
|
||||
whitespace_rules = [
|
||||
# This linter should be first since bash_rules depends on it.
|
||||
{'pattern': r'\s+$', 'strip': '\n', 'description': 'Fix trailing whitespace'},
|
||||
{'pattern': '\t', 'strip': '\n', 'description': 'Fix tab-based whitespace'},
|
||||
{"pattern": r"\s+$", "strip": "\n", "description": "Fix trailing whitespace"},
|
||||
{"pattern": "\t", "strip": "\n", "description": "Fix tab-based whitespace"},
|
||||
] # type: List[Rule]
|
||||
|
||||
markdown_whitespace_rules = list(
|
||||
[rule for rule in whitespace_rules if rule['pattern'] != r'\s+$']
|
||||
[rule for rule in whitespace_rules if rule["pattern"] != r"\s+$"]
|
||||
) + [
|
||||
# Two spaces trailing a line with other content is okay--it's a markdown line break.
|
||||
# This rule finds one space trailing a non-space, three or more trailing spaces, and
|
||||
# spaces on an empty line.
|
||||
{
|
||||
'pattern': r'((?<!\s)\s$)|(\s\s\s+$)|(^\s+$)',
|
||||
'strip': '\n',
|
||||
'description': 'Fix trailing whitespace',
|
||||
"pattern": r"((?<!\s)\s$)|(\s\s\s+$)|(^\s+$)",
|
||||
"strip": "\n",
|
||||
"description": "Fix trailing whitespace",
|
||||
},
|
||||
{
|
||||
'pattern': r'^#+[A-Za-z0-9]',
|
||||
'strip': '\n',
|
||||
'description': 'Missing space after # in heading',
|
||||
"pattern": r"^#+[A-Za-z0-9]",
|
||||
"strip": "\n",
|
||||
"description": "Missing space after # in heading",
|
||||
},
|
||||
]
|
||||
|
||||
python_rules = RuleList(
|
||||
langs=['py'],
|
||||
langs=["py"],
|
||||
rules=[
|
||||
{'pattern': r'".*"%\([a-z_].*\)?$', 'description': 'Missing space around "%"'},
|
||||
{'pattern': r"'.*'%\([a-z_].*\)?$", 'description': 'Missing space around "%"'},
|
||||
{"pattern": r'".*"%\([a-z_].*\)?$', "description": 'Missing space around "%"'},
|
||||
{"pattern": r"'.*'%\([a-z_].*\)?$", "description": 'Missing space around "%"'},
|
||||
# This rule is constructed with + to avoid triggering on itself
|
||||
{'pattern': r" =" + r'[^ =>~"]', 'description': 'Missing whitespace after "="'},
|
||||
{'pattern': r'":\w[^"]*$', 'description': 'Missing whitespace after ":"'},
|
||||
{'pattern': r"':\w[^']*$", 'description': 'Missing whitespace after ":"'},
|
||||
{'pattern': r"^\s+[#]\w", 'strip': '\n', 'description': 'Missing whitespace after "#"'},
|
||||
{"pattern": r" =" + r'[^ =>~"]', "description": 'Missing whitespace after "="'},
|
||||
{"pattern": r'":\w[^"]*$', "description": 'Missing whitespace after ":"'},
|
||||
{"pattern": r"':\w[^']*$", "description": 'Missing whitespace after ":"'},
|
||||
{"pattern": r"^\s+[#]\w", "strip": "\n", "description": 'Missing whitespace after "#"'},
|
||||
{
|
||||
'pattern': r"assertEquals[(]",
|
||||
'description': 'Use assertEqual, not assertEquals (which is deprecated).',
|
||||
"pattern": r"assertEquals[(]",
|
||||
"description": "Use assertEqual, not assertEquals (which is deprecated).",
|
||||
},
|
||||
{
|
||||
'pattern': r'self: Any',
|
||||
'description': 'you can omit Any annotation for self',
|
||||
'good_lines': ['def foo (self):'],
|
||||
'bad_lines': ['def foo(self: Any):'],
|
||||
"pattern": r"self: Any",
|
||||
"description": "you can omit Any annotation for self",
|
||||
"good_lines": ["def foo (self):"],
|
||||
"bad_lines": ["def foo(self: Any):"],
|
||||
},
|
||||
{'pattern': r"== None", 'description': 'Use `is None` to check whether something is None'},
|
||||
{'pattern': r"type:[(]", 'description': 'Missing whitespace after ":" in type annotation'},
|
||||
{'pattern': r"# type [(]", 'description': 'Missing : after type in type annotation'},
|
||||
{'pattern': r"#type", 'description': 'Missing whitespace after "#" in type annotation'},
|
||||
{'pattern': r'if[(]', 'description': 'Missing space between if and ('},
|
||||
{'pattern': r", [)]", 'description': 'Unnecessary whitespace between "," and ")"'},
|
||||
{'pattern': r"% [(]", 'description': 'Unnecessary whitespace between "%" and "("'},
|
||||
{"pattern": r"== None", "description": "Use `is None` to check whether something is None"},
|
||||
{"pattern": r"type:[(]", "description": 'Missing whitespace after ":" in type annotation'},
|
||||
{"pattern": r"# type [(]", "description": "Missing : after type in type annotation"},
|
||||
{"pattern": r"#type", "description": 'Missing whitespace after "#" in type annotation'},
|
||||
{"pattern": r"if[(]", "description": "Missing space between if and ("},
|
||||
{"pattern": r", [)]", "description": 'Unnecessary whitespace between "," and ")"'},
|
||||
{"pattern": r"% [(]", "description": 'Unnecessary whitespace between "%" and "("'},
|
||||
# This next check could have false positives, but it seems pretty
|
||||
# rare; if we find any, they can be added to the exclude list for
|
||||
# this rule.
|
||||
{
|
||||
'pattern': r' % [a-zA-Z0-9_.]*\)?$',
|
||||
'description': 'Used % comprehension without a tuple',
|
||||
"pattern": r" % [a-zA-Z0-9_.]*\)?$",
|
||||
"description": "Used % comprehension without a tuple",
|
||||
},
|
||||
{
|
||||
'pattern': r'.*%s.* % \([a-zA-Z0-9_.]*\)$',
|
||||
'description': 'Used % comprehension without a tuple',
|
||||
"pattern": r".*%s.* % \([a-zA-Z0-9_.]*\)$",
|
||||
"description": "Used % comprehension without a tuple",
|
||||
},
|
||||
{
|
||||
'pattern': r'__future__',
|
||||
'include_only': {'zulip_bots/zulip_bots/bots/'},
|
||||
'description': 'Bots no longer need __future__ imports.',
|
||||
"pattern": r"__future__",
|
||||
"include_only": {"zulip_bots/zulip_bots/bots/"},
|
||||
"description": "Bots no longer need __future__ imports.",
|
||||
},
|
||||
{
|
||||
'pattern': r'#!/usr/bin/env python$',
|
||||
'include_only': {'zulip_bots/'},
|
||||
'description': 'Python shebangs must be python3',
|
||||
"pattern": r"#!/usr/bin/env python$",
|
||||
"include_only": {"zulip_bots/"},
|
||||
"description": "Python shebangs must be python3",
|
||||
},
|
||||
{
|
||||
'pattern': r'(^|\s)open\s*\(',
|
||||
'description': 'open() should not be used in Zulip\'s bots. Use functions'
|
||||
' provided by the bots framework to access the filesystem.',
|
||||
'include_only': {'zulip_bots/zulip_bots/bots/'},
|
||||
"pattern": r"(^|\s)open\s*\(",
|
||||
"description": "open() should not be used in Zulip's bots. Use functions"
|
||||
" provided by the bots framework to access the filesystem.",
|
||||
"include_only": {"zulip_bots/zulip_bots/bots/"},
|
||||
},
|
||||
{
|
||||
'pattern': r'pprint',
|
||||
'description': 'Used pprint, which is most likely a debugging leftover. For user output, use print().',
|
||||
"pattern": r"pprint",
|
||||
"description": "Used pprint, which is most likely a debugging leftover. For user output, use print().",
|
||||
},
|
||||
{
|
||||
'pattern': r'\(BotTestCase\)',
|
||||
'bad_lines': ['class TestSomeBot(BotTestCase):'],
|
||||
'description': 'Bot test cases should directly inherit from BotTestCase *and* DefaultTests.',
|
||||
"pattern": r"\(BotTestCase\)",
|
||||
"bad_lines": ["class TestSomeBot(BotTestCase):"],
|
||||
"description": "Bot test cases should directly inherit from BotTestCase *and* DefaultTests.",
|
||||
},
|
||||
{
|
||||
'pattern': r'\(DefaultTests, BotTestCase\)',
|
||||
'bad_lines': ['class TestSomeBot(DefaultTests, BotTestCase):'],
|
||||
'good_lines': ['class TestSomeBot(BotTestCase, DefaultTests):'],
|
||||
'description': 'Bot test cases should inherit from BotTestCase before DefaultTests.',
|
||||
"pattern": r"\(DefaultTests, BotTestCase\)",
|
||||
"bad_lines": ["class TestSomeBot(DefaultTests, BotTestCase):"],
|
||||
"good_lines": ["class TestSomeBot(BotTestCase, DefaultTests):"],
|
||||
"description": "Bot test cases should inherit from BotTestCase before DefaultTests.",
|
||||
},
|
||||
*whitespace_rules,
|
||||
],
|
||||
|
@@ -105,12 +105,12 @@ python_rules = RuleList(
|
|||
)
|
||||
|
||||
bash_rules = RuleList(
|
||||
langs=['sh'],
|
||||
langs=["sh"],
|
||||
rules=[
|
||||
{
|
||||
'pattern': r'#!.*sh [-xe]',
|
||||
'description': 'Fix shebang line with proper call to /usr/bin/env for Bash path, change -x|-e switches'
|
||||
' to set -x|set -e',
|
||||
"pattern": r"#!.*sh [-xe]",
|
||||
"description": "Fix shebang line with proper call to /usr/bin/env for Bash path, change -x|-e switches"
|
||||
" to set -x|set -e",
|
||||
},
|
||||
*whitespace_rules[0:1],
|
||||
],
|
||||
|
@@ -118,7 +118,7 @@ bash_rules = RuleList(


 json_rules = RuleList(
-    langs=['json'],
+    langs=["json"],
     # Here, we don't check tab-based whitespace, because the tab-based
     # whitespace rule flags a lot of third-party JSON fixtures
     # under zerver/webhooks that we want preserved verbatim. So
@@ -131,21 +131,21 @@ json_rules = RuleList(
|
|||
|
||||
prose_style_rules = [
|
||||
{
|
||||
'pattern': r'[^\/\#\-"]([jJ]avascript)', # exclude usage in hrefs/divs
|
||||
'description': "javascript should be spelled JavaScript",
|
||||
"pattern": r'[^\/\#\-"]([jJ]avascript)', # exclude usage in hrefs/divs
|
||||
"description": "javascript should be spelled JavaScript",
|
||||
},
|
||||
{
|
||||
'pattern': r'''[^\/\-\."'\_\=\>]([gG]ithub)[^\.\-\_"\<]''', # exclude usage in hrefs/divs
|
||||
'description': "github should be spelled GitHub",
|
||||
"pattern": r"""[^\/\-\."'\_\=\>]([gG]ithub)[^\.\-\_"\<]""", # exclude usage in hrefs/divs
|
||||
"description": "github should be spelled GitHub",
|
||||
},
|
||||
{
|
||||
'pattern': r'[oO]rganisation', # exclude usage in hrefs/divs
|
||||
'description': "Organization is spelled with a z",
|
||||
"pattern": r"[oO]rganisation", # exclude usage in hrefs/divs
|
||||
"description": "Organization is spelled with a z",
|
||||
},
|
||||
{'pattern': r'!!! warning', 'description': "!!! warning is invalid; it's spelled '!!! warn'"},
|
||||
{"pattern": r"!!! warning", "description": "!!! warning is invalid; it's spelled '!!! warn'"},
|
||||
{
|
||||
'pattern': r'[^-_]botserver(?!rc)|bot server',
|
||||
'description': "Use Botserver instead of botserver or Botserver.",
|
||||
"pattern": r"[^-_]botserver(?!rc)|bot server",
|
||||
"description": "Use Botserver instead of botserver or Botserver.",
|
||||
},
|
||||
] # type: List[Rule]
|
||||
|
||||
|
@@ -154,13 +154,13 @@ markdown_docs_length_exclude = {
|
|||
}
|
||||
|
||||
markdown_rules = RuleList(
|
||||
langs=['md'],
|
||||
langs=["md"],
|
||||
rules=[
|
||||
*markdown_whitespace_rules,
|
||||
*prose_style_rules,
|
||||
{
|
||||
'pattern': r'\[(?P<url>[^\]]+)\]\((?P=url)\)',
|
||||
'description': 'Linkified markdown URLs should use cleaner <http://example.com> syntax.',
|
||||
"pattern": r"\[(?P<url>[^\]]+)\]\((?P=url)\)",
|
||||
"description": "Linkified markdown URLs should use cleaner <http://example.com> syntax.",
|
||||
},
|
||||
],
|
||||
max_length=120,
|
||||
|
@@ -168,7 +168,7 @@ markdown_rules = RuleList(
 )

 txt_rules = RuleList(
-    langs=['txt'],
+    langs=["txt"],
     rules=whitespace_rules,
 )

|
tools/deploy (204 changed lines)
|
@@ -11,69 +11,69 @@ from typing import Any, Callable, Dict, List
|
|||
import requests
|
||||
from requests import Response
|
||||
|
||||
red = '\033[91m' # type: str
|
||||
green = '\033[92m' # type: str
|
||||
end_format = '\033[0m' # type: str
|
||||
bold = '\033[1m' # type: str
|
||||
red = "\033[91m" # type: str
|
||||
green = "\033[92m" # type: str
|
||||
end_format = "\033[0m" # type: str
|
||||
bold = "\033[1m" # type: str
|
||||
|
||||
bots_dir = '.bots' # type: str
|
||||
bots_dir = ".bots" # type: str
|
||||
|
||||
|
||||
def pack(options: argparse.Namespace) -> None:
|
||||
# Basic sanity checks for input.
|
||||
if not options.path:
|
||||
print('tools/deploy: Path to bot folder not specified.')
|
||||
print("tools/deploy: Path to bot folder not specified.")
|
||||
sys.exit(1)
|
||||
if not options.config:
|
||||
print('tools/deploy: Path to zuliprc not specified.')
|
||||
print("tools/deploy: Path to zuliprc not specified.")
|
||||
sys.exit(1)
|
||||
if not options.main:
|
||||
print('tools/deploy: No main bot file specified.')
|
||||
print("tools/deploy: No main bot file specified.")
|
||||
sys.exit(1)
|
||||
if not os.path.isfile(options.config):
|
||||
print('pack: Config file not found at path: {}.'.format(options.config))
|
||||
print("pack: Config file not found at path: {}.".format(options.config))
|
||||
sys.exit(1)
|
||||
if not os.path.isdir(options.path):
|
||||
print('pack: Bot folder not found at path: {}.'.format(options.path))
|
||||
print("pack: Bot folder not found at path: {}.".format(options.path))
|
||||
sys.exit(1)
|
||||
main_path = os.path.join(options.path, options.main)
|
||||
if not os.path.isfile(main_path):
|
||||
print('pack: Bot main file not found at path: {}.'.format(main_path))
|
||||
print("pack: Bot main file not found at path: {}.".format(main_path))
|
||||
sys.exit(1)
|
||||
|
||||
# Main logic for packing the bot.
|
||||
if not os.path.exists(bots_dir):
|
||||
os.makedirs(bots_dir)
|
||||
zip_file_path = os.path.join(bots_dir, options.botname + ".zip")
|
||||
zip_file = zipfile.ZipFile(zip_file_path, 'w', zipfile.ZIP_DEFLATED)
|
||||
zip_file = zipfile.ZipFile(zip_file_path, "w", zipfile.ZIP_DEFLATED)
|
||||
# Pack the complete bot folder
|
||||
for root, dirs, files in os.walk(options.path):
|
||||
for file in files:
|
||||
file_path = os.path.join(root, file)
|
||||
zip_file.write(file_path, os.path.relpath(file_path, options.path))
|
||||
# Pack the zuliprc
|
||||
zip_file.write(options.config, 'zuliprc')
|
||||
zip_file.write(options.config, "zuliprc")
|
||||
# Pack the config file for the botfarm.
|
||||
bot_config = textwrap.dedent(
|
||||
'''\
|
||||
"""\
|
||||
[deploy]
|
||||
bot={}
|
||||
zuliprc=zuliprc
|
||||
'''.format(
|
||||
""".format(
|
||||
options.main
|
||||
)
|
||||
)
|
||||
zip_file.writestr('config.ini', bot_config)
|
||||
zip_file.writestr("config.ini", bot_config)
|
||||
zip_file.close()
|
||||
print('pack: Created zip file at: {}.'.format(zip_file_path))
|
||||
print("pack: Created zip file at: {}.".format(zip_file_path))
|
||||
|
||||
|
||||
def check_common_options(options: argparse.Namespace) -> None:
|
||||
if not options.server:
|
||||
print('tools/deploy: URL to Botfarm server not specified.')
|
||||
print("tools/deploy: URL to Botfarm server not specified.")
|
||||
sys.exit(1)
|
||||
if not options.token:
|
||||
print('tools/deploy: Botfarm deploy token not specified.')
|
||||
print("tools/deploy: Botfarm deploy token not specified.")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
|
@@ -83,7 +83,7 @@ def handle_common_response_without_data(
     return handle_common_response(
         response=response,
         operation=operation,
-        success_handler=lambda r: print('{}: {}'.format(operation, success_message)),
+        success_handler=lambda r: print("{}: {}".format(operation, success_message)),
     )

@@ -92,56 +92,56 @@ def handle_common_response(
|
|||
) -> bool:
|
||||
if response.status_code == requests.codes.ok:
|
||||
response_data = response.json()
|
||||
if response_data['status'] == 'success':
|
||||
if response_data["status"] == "success":
|
||||
success_handler(response_data)
|
||||
return True
|
||||
elif response_data['status'] == 'error':
|
||||
print('{}: {}'.format(operation, response_data['message']))
|
||||
elif response_data["status"] == "error":
|
||||
print("{}: {}".format(operation, response_data["message"]))
|
||||
return False
|
||||
else:
|
||||
print('{}: Unexpected success response format'.format(operation))
|
||||
print("{}: Unexpected success response format".format(operation))
|
||||
return False
|
||||
if response.status_code == requests.codes.unauthorized:
|
||||
print('{}: Authentication error with the server. Aborting.'.format(operation))
|
||||
print("{}: Authentication error with the server. Aborting.".format(operation))
|
||||
else:
|
||||
print('{}: Error {}. Aborting.'.format(operation, response.status_code))
|
||||
print("{}: Error {}. Aborting.".format(operation, response.status_code))
|
||||
return False
|
||||
|
||||
|
||||
def upload(options: argparse.Namespace) -> None:
|
||||
check_common_options(options)
|
||||
file_path = os.path.join(bots_dir, options.botname + '.zip')
|
||||
file_path = os.path.join(bots_dir, options.botname + ".zip")
|
||||
if not os.path.exists(file_path):
|
||||
print('upload: Could not find bot package at {}.'.format(file_path))
|
||||
print("upload: Could not find bot package at {}.".format(file_path))
|
||||
sys.exit(1)
|
||||
files = {'file': open(file_path, 'rb')}
|
||||
headers = {'key': options.token}
|
||||
url = urllib.parse.urljoin(options.server, 'bots/upload')
|
||||
files = {"file": open(file_path, "rb")}
|
||||
headers = {"key": options.token}
|
||||
url = urllib.parse.urljoin(options.server, "bots/upload")
|
||||
response = requests.post(url, files=files, headers=headers)
|
||||
result = handle_common_response_without_data(
|
||||
response, 'upload', 'Uploaded the bot package to botfarm.'
|
||||
response, "upload", "Uploaded the bot package to botfarm."
|
||||
)
|
||||
if result is False:
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def clean(options: argparse.Namespace) -> None:
|
||||
file_path = os.path.join(bots_dir, options.botname + '.zip')
|
||||
file_path = os.path.join(bots_dir, options.botname + ".zip")
|
||||
if os.path.exists(file_path):
|
||||
os.remove(file_path)
|
||||
print('clean: Removed {}.'.format(file_path))
|
||||
print("clean: Removed {}.".format(file_path))
|
||||
else:
|
||||
print('clean: File \'{}\' not found.'.format(file_path))
|
||||
print("clean: File '{}' not found.".format(file_path))
|
||||
|
||||
|
||||
def process(options: argparse.Namespace) -> None:
|
||||
check_common_options(options)
|
||||
headers = {'key': options.token}
|
||||
url = urllib.parse.urljoin(options.server, 'bots/process')
|
||||
payload = {'name': options.botname}
|
||||
headers = {"key": options.token}
|
||||
url = urllib.parse.urljoin(options.server, "bots/process")
|
||||
payload = {"name": options.botname}
|
||||
response = requests.post(url, headers=headers, json=payload)
|
||||
result = handle_common_response_without_data(
|
||||
response, 'process', 'The bot has been processed by the botfarm.'
|
||||
response, "process", "The bot has been processed by the botfarm."
|
||||
)
|
||||
if result is False:
|
||||
sys.exit(1)
|
||||
|
@@ -149,12 +149,12 @@ def process(options: argparse.Namespace) -> None:
|
|||
|
||||
def start(options: argparse.Namespace) -> None:
|
||||
check_common_options(options)
|
||||
headers = {'key': options.token}
|
||||
url = urllib.parse.urljoin(options.server, 'bots/start')
|
||||
payload = {'name': options.botname}
|
||||
headers = {"key": options.token}
|
||||
url = urllib.parse.urljoin(options.server, "bots/start")
|
||||
payload = {"name": options.botname}
|
||||
response = requests.post(url, headers=headers, json=payload)
|
||||
result = handle_common_response_without_data(
|
||||
response, 'start', 'The bot has been started by the botfarm.'
|
||||
response, "start", "The bot has been started by the botfarm."
|
||||
)
|
||||
if result is False:
|
||||
sys.exit(1)
|
||||
|
@@ -162,12 +162,12 @@ def start(options: argparse.Namespace) -> None:
|
|||
|
||||
def stop(options: argparse.Namespace) -> None:
|
||||
check_common_options(options)
|
||||
headers = {'key': options.token}
|
||||
url = urllib.parse.urljoin(options.server, 'bots/stop')
|
||||
payload = {'name': options.botname}
|
||||
headers = {"key": options.token}
|
||||
url = urllib.parse.urljoin(options.server, "bots/stop")
|
||||
payload = {"name": options.botname}
|
||||
response = requests.post(url, headers=headers, json=payload)
|
||||
result = handle_common_response_without_data(
|
||||
response, 'stop', 'The bot has been stopped by the botfarm.'
|
||||
response, "stop", "The bot has been stopped by the botfarm."
|
||||
)
|
||||
if result is False:
|
||||
sys.exit(1)
|
||||
|
@@ -182,27 +182,27 @@ def prepare(options: argparse.Namespace) -> None:
|
|||
|
||||
def log(options: argparse.Namespace) -> None:
|
||||
check_common_options(options)
|
||||
headers = {'key': options.token}
|
||||
headers = {"key": options.token}
|
||||
if options.lines:
|
||||
lines = options.lines
|
||||
else:
|
||||
lines = None
|
||||
payload = {'name': options.botname, 'lines': lines}
|
||||
url = urllib.parse.urljoin(options.server, 'bots/logs/' + options.botname)
|
||||
payload = {"name": options.botname, "lines": lines}
|
||||
url = urllib.parse.urljoin(options.server, "bots/logs/" + options.botname)
|
||||
response = requests.get(url, json=payload, headers=headers)
|
||||
result = handle_common_response(response, 'log', lambda r: print(r['logs']['content']))
|
||||
result = handle_common_response(response, "log", lambda r: print(r["logs"]["content"]))
|
||||
if result is False:
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def delete(options: argparse.Namespace) -> None:
|
||||
check_common_options(options)
|
||||
headers = {'key': options.token}
|
||||
url = urllib.parse.urljoin(options.server, 'bots/delete')
|
||||
payload = {'name': options.botname}
|
||||
headers = {"key": options.token}
|
||||
url = urllib.parse.urljoin(options.server, "bots/delete")
|
||||
payload = {"name": options.botname}
|
||||
response = requests.post(url, headers=headers, json=payload)
|
||||
result = handle_common_response_without_data(
|
||||
response, 'delete', 'The bot has been removed from the botfarm.'
|
||||
response, "delete", "The bot has been removed from the botfarm."
|
||||
)
|
||||
if result is False:
|
||||
sys.exit(1)
|
||||
|
@@ -210,15 +210,15 @@ def delete(options: argparse.Namespace) -> None:
|
|||
|
||||
def list_bots(options: argparse.Namespace) -> None:
|
||||
check_common_options(options)
|
||||
headers = {'key': options.token}
|
||||
headers = {"key": options.token}
|
||||
if options.format:
|
||||
pretty_print = True
|
||||
else:
|
||||
pretty_print = False
|
||||
url = urllib.parse.urljoin(options.server, 'bots/list')
|
||||
url = urllib.parse.urljoin(options.server, "bots/list")
|
||||
response = requests.get(url, headers=headers)
|
||||
result = handle_common_response(
|
||||
response, 'ls', lambda r: print_bots(r['bots']['list'], pretty_print)
|
||||
response, "ls", lambda r: print_bots(r["bots"]["list"], pretty_print)
|
||||
)
|
||||
if result is False:
|
||||
sys.exit(1)
|
||||
|
@@ -229,36 +229,36 @@ def print_bots(bots: List[Any], pretty_print: bool) -> None:
|
|||
print_bots_pretty(bots)
|
||||
else:
|
||||
for bot in bots:
|
||||
print('{}\t{}\t{}\t{}'.format(bot['name'], bot['status'], bot['email'], bot['site']))
|
||||
print("{}\t{}\t{}\t{}".format(bot["name"], bot["status"], bot["email"], bot["site"]))
|
||||
|
||||
|
||||
def print_bots_pretty(bots: List[Any]) -> None:
|
||||
if len(bots) == 0:
|
||||
print('ls: No bots found on the botfarm')
|
||||
print("ls: No bots found on the botfarm")
|
||||
else:
|
||||
print('ls: There are the following bots on the botfarm:')
|
||||
print("ls: There are the following bots on the botfarm:")
|
||||
name_col_len, status_col_len, email_col_len, site_col_len = 25, 15, 35, 35
|
||||
row_format = '{0} {1} {2} {3}'
|
||||
row_format = "{0} {1} {2} {3}"
|
||||
header = row_format.format(
|
||||
'NAME'.rjust(name_col_len),
|
||||
'STATUS'.rjust(status_col_len),
|
||||
'EMAIL'.rjust(email_col_len),
|
||||
'SITE'.rjust(site_col_len),
|
||||
"NAME".rjust(name_col_len),
|
||||
"STATUS".rjust(status_col_len),
|
||||
"EMAIL".rjust(email_col_len),
|
||||
"SITE".rjust(site_col_len),
|
||||
)
|
||||
header_bottom = row_format.format(
|
||||
'-' * name_col_len,
|
||||
'-' * status_col_len,
|
||||
'-' * email_col_len,
|
||||
'-' * site_col_len,
|
||||
"-" * name_col_len,
|
||||
"-" * status_col_len,
|
||||
"-" * email_col_len,
|
||||
"-" * site_col_len,
|
||||
)
|
||||
print(header)
|
||||
print(header_bottom)
|
||||
for bot in bots:
|
||||
row = row_format.format(
|
||||
bot['name'].rjust(name_col_len),
|
||||
bot['status'].rjust(status_col_len),
|
||||
bot['email'].rjust(email_col_len),
|
||||
bot['site'].rjust(site_col_len),
|
||||
bot["name"].rjust(name_col_len),
|
||||
bot["status"].rjust(status_col_len),
|
||||
bot["email"].rjust(email_col_len),
|
||||
bot["site"].rjust(site_col_len),
|
||||
)
|
||||
print(row)
|
||||
|
||||
|
@@ -297,52 +297,52 @@ To list user's bots, use:
|
|||
|
||||
"""
|
||||
parser = argparse.ArgumentParser(usage=usage)
|
||||
parser.add_argument('command', help='Command to run.')
|
||||
parser.add_argument('botname', nargs='?', help='Name of bot to operate on.')
|
||||
parser.add_argument("command", help="Command to run.")
|
||||
parser.add_argument("botname", nargs="?", help="Name of bot to operate on.")
|
||||
parser.add_argument(
|
||||
'--server',
|
||||
'-s',
|
||||
metavar='SERVERURL',
|
||||
default=os.environ.get('SERVER', ''),
|
||||
help='Url of the Zulip Botfarm server.',
|
||||
"--server",
|
||||
"-s",
|
||||
metavar="SERVERURL",
|
||||
default=os.environ.get("SERVER", ""),
|
||||
help="Url of the Zulip Botfarm server.",
|
||||
)
|
||||
parser.add_argument(
|
||||
'--token', '-t', default=os.environ.get('TOKEN', ''), help='Deploy Token for the Botfarm.'
|
||||
"--token", "-t", default=os.environ.get("TOKEN", ""), help="Deploy Token for the Botfarm."
|
||||
)
|
||||
parser.add_argument('--path', '-p', help='Path to the bot directory.')
|
||||
parser.add_argument('--config', '-c', help='Path to the zuliprc file.')
|
||||
parser.add_argument("--path", "-p", help="Path to the bot directory.")
|
||||
parser.add_argument("--config", "-c", help="Path to the zuliprc file.")
|
||||
parser.add_argument(
|
||||
'--main', '-m', help='Path to the bot\'s main file, relative to the bot\'s directory.'
|
||||
"--main", "-m", help="Path to the bot's main file, relative to the bot's directory."
|
||||
)
|
||||
parser.add_argument('--lines', '-l', help='Number of lines in log required.')
|
||||
parser.add_argument("--lines", "-l", help="Number of lines in log required.")
|
||||
parser.add_argument(
|
||||
'--format', '-f', action='store_true', help='Print user\'s bots in human readable format'
|
||||
"--format", "-f", action="store_true", help="Print user's bots in human readable format"
|
||||
)
|
||||
options = parser.parse_args()
|
||||
if not options.command:
|
||||
print('tools/deploy: No command specified.')
|
||||
print("tools/deploy: No command specified.")
|
||||
sys.exit(1)
|
||||
if not options.botname and options.command not in ['ls']:
|
||||
print('tools/deploy: No bot name specified. Please specify a name like \'my-custom-bot\'')
|
||||
if not options.botname and options.command not in ["ls"]:
|
||||
print("tools/deploy: No bot name specified. Please specify a name like 'my-custom-bot'")
|
||||
sys.exit(1)
|
||||
|
||||
commands = {
|
||||
'pack': pack,
|
||||
'upload': upload,
|
||||
'clean': clean,
|
||||
'prepare': prepare,
|
||||
'process': process,
|
||||
'start': start,
|
||||
'stop': stop,
|
||||
'log': log,
|
||||
'delete': delete,
|
||||
'ls': list_bots,
|
||||
"pack": pack,
|
||||
"upload": upload,
|
||||
"clean": clean,
|
||||
"prepare": prepare,
|
||||
"process": process,
|
||||
"start": start,
|
||||
"stop": stop,
|
||||
"log": log,
|
||||
"delete": delete,
|
||||
"ls": list_bots,
|
||||
}
|
||||
if options.command in commands:
|
||||
commands[options.command](options)
|
||||
else:
|
||||
print('tools/deploy: No command \'{}\' found.'.format(options.command))
|
||||
print("tools/deploy: No command '{}' found.".format(options.command))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
|
@@ -11,246 +11,246 @@ from gitlint.rules import CommitMessageTitle, LineRule, RuleViolation
|
|||
# License: MIT
|
||||
# Ref: fit_commit/validators/tense.rb
|
||||
WORD_SET = {
|
||||
'adds',
|
||||
'adding',
|
||||
'added',
|
||||
'allows',
|
||||
'allowing',
|
||||
'allowed',
|
||||
'amends',
|
||||
'amending',
|
||||
'amended',
|
||||
'bumps',
|
||||
'bumping',
|
||||
'bumped',
|
||||
'calculates',
|
||||
'calculating',
|
||||
'calculated',
|
||||
'changes',
|
||||
'changing',
|
||||
'changed',
|
||||
'cleans',
|
||||
'cleaning',
|
||||
'cleaned',
|
||||
'commits',
|
||||
'committing',
|
||||
'committed',
|
||||
'corrects',
|
||||
'correcting',
|
||||
'corrected',
|
||||
'creates',
|
||||
'creating',
|
||||
'created',
|
||||
'darkens',
|
||||
'darkening',
|
||||
'darkened',
|
||||
'disables',
|
||||
'disabling',
|
||||
'disabled',
|
||||
'displays',
|
||||
'displaying',
|
||||
'displayed',
|
||||
'documents',
|
||||
'documenting',
|
||||
'documented',
|
||||
'drys',
|
||||
'drying',
|
||||
'dryed',
|
||||
'ends',
|
||||
'ending',
|
||||
'ended',
|
||||
'enforces',
|
||||
'enforcing',
|
||||
'enforced',
|
||||
'enqueues',
|
||||
'enqueuing',
|
||||
'enqueued',
|
||||
'extracts',
|
||||
'extracting',
|
||||
'extracted',
|
||||
'finishes',
|
||||
'finishing',
|
||||
'finished',
|
||||
'fixes',
|
||||
'fixing',
|
||||
'fixed',
|
||||
'formats',
|
||||
'formatting',
|
||||
'formatted',
|
||||
'guards',
|
||||
'guarding',
|
||||
'guarded',
|
||||
'handles',
|
||||
'handling',
|
||||
'handled',
|
||||
'hides',
|
||||
'hiding',
|
||||
'hid',
|
||||
'increases',
|
||||
'increasing',
|
||||
'increased',
|
||||
'ignores',
|
||||
'ignoring',
|
||||
'ignored',
|
||||
'implements',
|
||||
'implementing',
|
||||
'implemented',
|
||||
'improves',
|
||||
'improving',
|
||||
'improved',
|
||||
'keeps',
|
||||
'keeping',
|
||||
'kept',
|
||||
'kills',
|
||||
'killing',
|
||||
'killed',
|
||||
'makes',
|
||||
'making',
|
||||
'made',
|
||||
'merges',
|
||||
'merging',
|
||||
'merged',
|
||||
'moves',
|
||||
'moving',
|
||||
'moved',
|
||||
'permits',
|
||||
'permitting',
|
||||
'permitted',
|
||||
'prevents',
|
||||
'preventing',
|
||||
'prevented',
|
||||
'pushes',
|
||||
'pushing',
|
||||
'pushed',
|
||||
'rebases',
|
||||
'rebasing',
|
||||
'rebased',
|
||||
'refactors',
|
||||
'refactoring',
|
||||
'refactored',
|
||||
'removes',
|
||||
'removing',
|
||||
'removed',
|
||||
'renames',
|
||||
'renaming',
|
||||
'renamed',
|
||||
'reorders',
|
||||
'reordering',
|
||||
'reordered',
|
||||
'replaces',
|
||||
'replacing',
|
||||
'replaced',
|
||||
'requires',
|
||||
'requiring',
|
||||
'required',
|
||||
'restores',
|
||||
'restoring',
|
||||
'restored',
|
||||
'sends',
|
||||
'sending',
|
||||
'sent',
|
||||
'sets',
|
||||
'setting',
|
||||
'separates',
|
||||
'separating',
|
||||
'separated',
|
||||
'shows',
|
||||
'showing',
|
||||
'showed',
|
||||
'simplifies',
|
||||
'simplifying',
|
||||
'simplified',
|
||||
'skips',
|
||||
'skipping',
|
||||
'skipped',
|
||||
'sorts',
|
||||
'sorting',
|
||||
'speeds',
|
||||
'speeding',
|
||||
'sped',
|
||||
'starts',
|
||||
'starting',
|
||||
'started',
|
||||
'supports',
|
||||
'supporting',
|
||||
'supported',
|
||||
'takes',
|
||||
'taking',
|
||||
'took',
|
||||
'testing',
|
||||
'tested', # 'tests' excluded to reduce false negative
|
||||
'truncates',
|
||||
'truncating',
|
||||
'truncated',
|
||||
'updates',
|
||||
'updating',
|
||||
'updated',
|
||||
'uses',
|
||||
'using',
|
||||
'used',
|
||||
"adds",
|
||||
"adding",
|
||||
"added",
|
||||
"allows",
|
||||
"allowing",
|
||||
"allowed",
|
||||
"amends",
|
||||
"amending",
|
||||
"amended",
|
||||
"bumps",
|
||||
"bumping",
|
||||
"bumped",
|
||||
"calculates",
|
||||
"calculating",
|
||||
"calculated",
|
||||
"changes",
|
||||
"changing",
|
||||
"changed",
|
||||
"cleans",
|
||||
"cleaning",
|
||||
"cleaned",
|
||||
"commits",
|
||||
"committing",
|
||||
"committed",
|
||||
"corrects",
|
||||
"correcting",
|
||||
"corrected",
|
||||
"creates",
|
||||
"creating",
|
||||
"created",
|
||||
"darkens",
|
||||
"darkening",
|
||||
"darkened",
|
||||
"disables",
|
||||
"disabling",
|
||||
"disabled",
|
||||
"displays",
|
||||
"displaying",
|
||||
"displayed",
|
||||
"documents",
|
||||
"documenting",
|
||||
"documented",
|
||||
"drys",
|
||||
"drying",
|
||||
"dryed",
|
||||
"ends",
|
||||
"ending",
|
||||
"ended",
|
||||
"enforces",
|
||||
"enforcing",
|
||||
"enforced",
|
||||
"enqueues",
|
||||
"enqueuing",
|
||||
"enqueued",
|
||||
"extracts",
|
||||
"extracting",
|
||||
"extracted",
|
||||
"finishes",
|
||||
"finishing",
|
||||
"finished",
|
||||
"fixes",
|
||||
"fixing",
|
||||
"fixed",
|
||||
"formats",
|
||||
"formatting",
|
||||
"formatted",
|
||||
"guards",
|
||||
"guarding",
|
||||
"guarded",
|
||||
"handles",
|
||||
"handling",
|
||||
"handled",
|
||||
"hides",
|
||||
"hiding",
|
||||
"hid",
|
||||
"increases",
|
||||
"increasing",
|
||||
"increased",
|
||||
"ignores",
|
||||
"ignoring",
|
||||
"ignored",
|
||||
"implements",
|
||||
"implementing",
|
||||
"implemented",
|
||||
"improves",
|
||||
"improving",
|
||||
"improved",
|
||||
"keeps",
|
||||
"keeping",
|
||||
"kept",
|
||||
"kills",
|
||||
"killing",
|
||||
"killed",
|
||||
"makes",
|
||||
"making",
|
||||
"made",
|
||||
"merges",
|
||||
"merging",
|
||||
"merged",
|
||||
"moves",
|
||||
"moving",
|
||||
"moved",
|
||||
"permits",
|
||||
"permitting",
|
||||
"permitted",
|
||||
"prevents",
|
||||
"preventing",
|
||||
"prevented",
|
||||
"pushes",
|
||||
"pushing",
|
||||
"pushed",
|
||||
"rebases",
|
||||
"rebasing",
|
||||
"rebased",
|
||||
"refactors",
|
||||
"refactoring",
|
||||
"refactored",
|
||||
"removes",
|
||||
"removing",
|
||||
"removed",
|
||||
"renames",
|
||||
"renaming",
|
||||
"renamed",
|
||||
"reorders",
|
||||
"reordering",
|
||||
"reordered",
|
||||
"replaces",
|
||||
"replacing",
|
||||
"replaced",
|
||||
"requires",
|
||||
"requiring",
|
||||
"required",
|
||||
"restores",
|
||||
"restoring",
|
||||
"restored",
|
||||
"sends",
|
||||
"sending",
|
||||
"sent",
|
||||
"sets",
|
||||
"setting",
|
||||
"separates",
|
||||
"separating",
|
||||
"separated",
|
||||
"shows",
|
||||
"showing",
|
||||
"showed",
|
||||
"simplifies",
|
||||
"simplifying",
|
||||
"simplified",
|
||||
"skips",
|
||||
"skipping",
|
||||
"skipped",
|
||||
"sorts",
|
||||
"sorting",
|
||||
"speeds",
|
||||
"speeding",
|
||||
"sped",
|
||||
"starts",
|
||||
"starting",
|
||||
"started",
|
||||
"supports",
|
||||
"supporting",
|
||||
"supported",
|
||||
"takes",
|
||||
"taking",
|
||||
"took",
|
||||
"testing",
|
||||
"tested", # 'tests' excluded to reduce false negative
|
||||
"truncates",
|
||||
"truncating",
|
||||
"truncated",
|
||||
"updates",
|
||||
"updating",
|
||||
"updated",
|
||||
"uses",
|
||||
"using",
|
||||
"used",
|
||||
}
|
||||
|
||||
imperative_forms = [
|
||||
'add',
|
||||
'allow',
|
||||
'amend',
|
||||
'bump',
|
||||
'calculate',
|
||||
'change',
|
||||
'clean',
|
||||
'commit',
|
||||
'correct',
|
||||
'create',
|
||||
'darken',
|
||||
'disable',
|
||||
'display',
|
||||
'document',
|
||||
'dry',
|
||||
'end',
|
||||
'enforce',
|
||||
'enqueue',
|
||||
'extract',
|
||||
'finish',
|
||||
'fix',
|
||||
'format',
|
||||
'guard',
|
||||
'handle',
|
||||
'hide',
|
||||
'ignore',
|
||||
'implement',
|
||||
'improve',
|
||||
'increase',
|
||||
'keep',
|
||||
'kill',
|
||||
'make',
|
||||
'merge',
|
||||
'move',
|
||||
'permit',
|
||||
'prevent',
|
||||
'push',
|
||||
'rebase',
|
||||
'refactor',
|
||||
'remove',
|
||||
'rename',
|
||||
'reorder',
|
||||
'replace',
|
||||
'require',
|
||||
'restore',
|
||||
'send',
|
||||
'separate',
|
||||
'set',
|
||||
'show',
|
||||
'simplify',
|
||||
'skip',
|
||||
'sort',
|
||||
'speed',
|
||||
'start',
|
||||
'support',
|
||||
'take',
|
||||
'test',
|
||||
'truncate',
|
||||
'update',
|
||||
'use',
|
||||
"add",
|
||||
"allow",
|
||||
"amend",
|
||||
"bump",
|
||||
"calculate",
|
||||
"change",
|
||||
"clean",
|
||||
"commit",
|
||||
"correct",
|
||||
"create",
|
||||
"darken",
|
||||
"disable",
|
||||
"display",
|
||||
"document",
|
||||
"dry",
|
||||
"end",
|
||||
"enforce",
|
||||
"enqueue",
|
||||
"extract",
|
||||
"finish",
|
||||
"fix",
|
||||
"format",
|
||||
"guard",
|
||||
"handle",
|
||||
"hide",
|
||||
"ignore",
|
||||
"implement",
|
||||
"improve",
|
||||
"increase",
|
||||
"keep",
|
||||
"kill",
|
||||
"make",
|
||||
"merge",
|
||||
"move",
|
||||
"permit",
|
||||
"prevent",
|
||||
"push",
|
||||
"rebase",
|
||||
"refactor",
|
||||
"remove",
|
||||
"rename",
|
||||
"reorder",
|
||||
"replace",
|
||||
"require",
|
||||
"restore",
|
||||
"send",
|
||||
"separate",
|
||||
"set",
|
||||
"show",
|
||||
"simplify",
|
||||
"skip",
|
||||
"sort",
|
||||
"speed",
|
||||
"start",
|
||||
"support",
|
||||
"take",
|
||||
"test",
|
||||
"truncate",
|
||||
"update",
|
||||
"use",
|
||||
]
|
||||
imperative_forms.sort()
|
||||
|
||||
|
@@ -260,8 +260,8 @@ def head_binary_search(key: str, words: List[str]) -> str:
     3 characters."""

     # Edge case: 'disable' and 'display' have the same 3 starting letters.
-    if key in ['displays', 'displaying', 'displayed']:
-        return 'display'
+    if key in ["displays", "displaying", "displayed"]:
+        return "display"

     lower = 0
     upper = len(words) - 1
@@ -292,7 +292,7 @@ class ImperativeMood(LineRule):
     target = CommitMessageTitle

     error_msg = (
-        'The first word in commit title should be in imperative mood '
+        "The first word in commit title should be in imperative mood "
         '("{word}" -> "{imperative}"): "{title}"'
     )

@@ -300,7 +300,7 @@ class ImperativeMood(LineRule):
         violations = []

         # Ignore the section tag (ie `<section tag>: <message body>.`)
-        words = line.split(': ', 1)[-1].split()
+        words = line.split(": ", 1)[-1].split()
         first_word = words[0].lower()

         if first_word in WORD_SET:
tools/lint (16 changed lines)
|
@@ -9,7 +9,7 @@ from custom_check import non_py_rules, python_rules

 EXCLUDED_FILES = [
     # This is an external file that doesn't comply with our codestyle
-    'zulip/integrations/perforce/git_p4.py',
+    "zulip/integrations/perforce/git_p4.py",
 ]

@@ -21,21 +21,21 @@ def run() -> None:
|
|||
linter_config = LinterConfig(args)
|
||||
|
||||
by_lang = linter_config.list_files(
|
||||
file_types=['py', 'sh', 'json', 'md', 'txt'], exclude=EXCLUDED_FILES
|
||||
file_types=["py", "sh", "json", "md", "txt"], exclude=EXCLUDED_FILES
|
||||
)
|
||||
|
||||
linter_config.external_linter(
|
||||
'mypy',
|
||||
[sys.executable, 'tools/run-mypy'],
|
||||
['py'],
|
||||
"mypy",
|
||||
[sys.executable, "tools/run-mypy"],
|
||||
["py"],
|
||||
pass_targets=False,
|
||||
description="Static type checker for Python (config: mypy.ini)",
|
||||
)
|
||||
linter_config.external_linter(
|
||||
'flake8', ['flake8'], ['py'], description="Standard Python linter (config: .flake8)"
|
||||
"flake8", ["flake8"], ["py"], description="Standard Python linter (config: .flake8)"
|
||||
)
|
||||
linter_config.external_linter(
|
||||
'gitlint', ['tools/lint-commits'], description="Git Lint for commit messages"
|
||||
"gitlint", ["tools/lint-commits"], description="Git Lint for commit messages"
|
||||
)
|
||||
|
||||
@linter_config.lint
|
||||
|
@@ -55,5 +55,5 @@ def run() -> None:
|
|||
linter_config.do_lint()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
run()
|
||||
|
|
|
@@ -7,13 +7,13 @@ import subprocess
|
|||
import sys
|
||||
|
||||
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
ZULIP_BOTS_DIR = os.path.join(CURRENT_DIR, '..', 'zulip_bots')
|
||||
ZULIP_BOTS_DIR = os.path.join(CURRENT_DIR, "..", "zulip_bots")
|
||||
sys.path.append(ZULIP_BOTS_DIR)
|
||||
|
||||
red = '\033[91m'
|
||||
green = '\033[92m'
|
||||
end_format = '\033[0m'
|
||||
bold = '\033[1m'
|
||||
red = "\033[91m"
|
||||
green = "\033[92m"
|
||||
end_format = "\033[0m"
|
||||
bold = "\033[1m"
|
||||
|
||||
|
||||
def main():
|
||||
|
@@ -23,25 +23,25 @@ Creates a Python virtualenv. Its Python version is equal to
|
|||
the Python version this command is executed with."""
|
||||
parser = argparse.ArgumentParser(usage=usage)
|
||||
parser.add_argument(
|
||||
'--python-interpreter',
|
||||
'-p',
|
||||
metavar='PATH_TO_PYTHON_INTERPRETER',
|
||||
"--python-interpreter",
|
||||
"-p",
|
||||
metavar="PATH_TO_PYTHON_INTERPRETER",
|
||||
default=os.path.abspath(sys.executable),
|
||||
help='Path to the Python interpreter to use when provisioning.',
|
||||
help="Path to the Python interpreter to use when provisioning.",
|
||||
)
|
||||
parser.add_argument(
|
||||
'--force', '-f', action='store_true', help='create venv even with outdated Python version.'
|
||||
"--force", "-f", action="store_true", help="create venv even with outdated Python version."
|
||||
)
|
||||
options = parser.parse_args()
|
||||
|
||||
base_dir = os.path.abspath(os.path.join(__file__, '..', '..'))
|
||||
base_dir = os.path.abspath(os.path.join(__file__, "..", ".."))
|
||||
py_version_output = subprocess.check_output(
|
||||
[options.python_interpreter, '--version'], stderr=subprocess.STDOUT, universal_newlines=True
|
||||
[options.python_interpreter, "--version"], stderr=subprocess.STDOUT, universal_newlines=True
|
||||
)
|
||||
# The output has the format "Python 1.2.3"
|
||||
py_version_list = py_version_output.split()[1].split('.')
|
||||
py_version_list = py_version_output.split()[1].split(".")
|
||||
py_version = tuple(int(num) for num in py_version_list[0:2])
|
||||
venv_name = 'zulip-api-py{}-venv'.format(py_version[0])
|
||||
venv_name = "zulip-api-py{}-venv".format(py_version[0])
|
||||
|
||||
if py_version <= (3, 1) and (not options.force):
|
||||
print(
|
||||
|
@@ -53,7 +53,7 @@ the Python version this command is executed with."""
     venv_dir = os.path.join(base_dir, venv_name)
     if not os.path.isdir(venv_dir):
         try:
-            return_code = subprocess.call([options.python_interpreter, '-m', 'venv', venv_dir])
+            return_code = subprocess.call([options.python_interpreter, "-m", "venv", venv_dir])
         except OSError:
             print(
                 "{red}Installation with venv failed. Probable errors are: "
@@ -77,34 +77,34 @@ the Python version this command is executed with."""
|
|||
else:
|
||||
print("Virtualenv already exists.")
|
||||
|
||||
if os.path.isdir(os.path.join(venv_dir, 'Scripts')):
|
||||
if os.path.isdir(os.path.join(venv_dir, "Scripts")):
|
||||
# POSIX compatibility layer and Linux environment emulation for Windows
|
||||
# venv uses /Scripts instead of /bin on Windows cmd and Power Shell.
|
||||
# Read https://docs.python.org/3/library/venv.html
|
||||
venv_exec_dir = 'Scripts'
|
||||
venv_exec_dir = "Scripts"
|
||||
else:
|
||||
venv_exec_dir = 'bin'
|
||||
venv_exec_dir = "bin"
|
||||
|
||||
# On OS X, ensure we use the virtualenv version of the python binary for
|
||||
# future subprocesses instead of the version that this script was launched with. See
|
||||
# https://stackoverflow.com/questions/26323852/whats-the-meaning-of-pyvenv-launcher-environment-variable
|
||||
if '__PYVENV_LAUNCHER__' in os.environ:
|
||||
del os.environ['__PYVENV_LAUNCHER__']
|
||||
if "__PYVENV_LAUNCHER__" in os.environ:
|
||||
del os.environ["__PYVENV_LAUNCHER__"]
|
||||
|
||||
# In order to install all required packages for the venv, `pip` needs to be executed by
|
||||
# the venv's Python interpreter. `--prefix venv_dir` ensures that all modules are installed
|
||||
# in the right place.
|
||||
def install_dependencies(requirements_filename):
|
||||
pip_path = os.path.join(venv_dir, venv_exec_dir, 'pip')
|
||||
pip_path = os.path.join(venv_dir, venv_exec_dir, "pip")
|
||||
# We first install a modern version of pip that supports --prefix
|
||||
subprocess.call([pip_path, 'install', 'pip>=9.0'])
|
||||
subprocess.call([pip_path, "install", "pip>=9.0"])
|
||||
if subprocess.call(
|
||||
[
|
||||
pip_path,
|
||||
'install',
|
||||
'--prefix',
|
||||
"install",
|
||||
"--prefix",
|
||||
venv_dir,
|
||||
'-r',
|
||||
"-r",
|
||||
os.path.join(base_dir, requirements_filename),
|
||||
]
|
||||
):
|
||||
|
@@ -114,7 +114,7 @@ the Python version this command is executed with."""
|
|||
)
|
||||
)
|
||||
|
||||
install_dependencies('requirements.txt')
|
||||
install_dependencies("requirements.txt")
|
||||
|
||||
# Install all requirements for all bots. get_bot_paths()
|
||||
# has requirements that must be satisfied prior to calling
|
||||
|
@@ -127,15 +127,15 @@ the Python version this command is executed with."""
|
|||
relative_path = os.path.join(*path_split)
|
||||
install_dependencies(relative_path)
|
||||
|
||||
print(green + 'Success!' + end_format)
|
||||
print(green + "Success!" + end_format)
|
||||
|
||||
activate_command = os.path.join(base_dir, venv_dir, venv_exec_dir, 'activate')
|
||||
activate_command = os.path.join(base_dir, venv_dir, venv_exec_dir, "activate")
|
||||
# We make the path look like a Unix path, because most Windows users
|
||||
# are likely to be running in a bash shell.
|
||||
activate_command = activate_command.replace(os.sep, '/')
|
||||
print('\nRun the following to enter into the virtualenv:\n')
|
||||
print(bold + ' source ' + activate_command + end_format + "\n")
|
||||
activate_command = activate_command.replace(os.sep, "/")
|
||||
print("\nRun the following to enter into the virtualenv:\n")
|
||||
print(bold + " source " + activate_command + end_format + "\n")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
|
@@ -25,7 +25,7 @@ def cd(newdir):


 def _generate_dist(dist_type, setup_file, package_name, setup_args):
-    message = 'Generating {dist_type} for {package_name}.'.format(
+    message = "Generating {dist_type} for {package_name}.".format(
         dist_type=dist_type,
         package_name=package_name,
     )
@@ -35,7 +35,7 @@ def _generate_dist(dist_type, setup_file, package_name, setup_args):
|
|||
with cd(setup_dir):
|
||||
setuptools.sandbox.run_setup(setup_file, setup_args)
|
||||
|
||||
message = '{dist_type} for {package_name} generated under {dir}.\n'.format(
|
||||
message = "{dist_type} for {package_name} generated under {dir}.\n".format(
|
||||
dist_type=dist_type,
|
||||
package_name=package_name,
|
||||
dir=setup_dir,
|
||||
|
@@ -45,13 +45,13 @@ def _generate_dist(dist_type, setup_file, package_name, setup_args):
|
|||
|
||||
def generate_bdist_wheel(setup_file, package_name, universal=False):
|
||||
if universal:
|
||||
_generate_dist('bdist_wheel', setup_file, package_name, ['bdist_wheel', '--universal'])
|
||||
_generate_dist("bdist_wheel", setup_file, package_name, ["bdist_wheel", "--universal"])
|
||||
else:
|
||||
_generate_dist('bdist_wheel', setup_file, package_name, ['bdist_wheel'])
|
||||
_generate_dist("bdist_wheel", setup_file, package_name, ["bdist_wheel"])
|
||||
|
||||
|
||||
def twine_upload(dist_dirs):
|
||||
message = 'Uploading distributions under the following directories:'
|
||||
message = "Uploading distributions under the following directories:"
|
||||
print(crayons.green(message, bold=True))
|
||||
for dist_dir in dist_dirs:
|
||||
print(crayons.yellow(dist_dir))
|
||||
|
@@ -59,14 +59,14 @@ def twine_upload(dist_dirs):
|
|||
|
||||
|
||||
def cleanup(package_dir):
|
||||
build_dir = os.path.join(package_dir, 'build')
|
||||
temp_dir = os.path.join(package_dir, 'temp')
|
||||
dist_dir = os.path.join(package_dir, 'dist')
|
||||
egg_info = os.path.join(package_dir, '{}.egg-info'.format(os.path.basename(package_dir)))
|
||||
build_dir = os.path.join(package_dir, "build")
|
||||
temp_dir = os.path.join(package_dir, "temp")
|
||||
dist_dir = os.path.join(package_dir, "dist")
|
||||
egg_info = os.path.join(package_dir, "{}.egg-info".format(os.path.basename(package_dir)))
|
||||
|
||||
def _rm_if_it_exists(directory):
|
||||
if os.path.isdir(directory):
|
||||
print(crayons.green('Removing {}/*'.format(directory), bold=True))
|
||||
print(crayons.green("Removing {}/*".format(directory), bold=True))
|
||||
shutil.rmtree(directory)
|
||||
|
||||
_rm_if_it_exists(build_dir)
|
||||
|
@@ -77,11 +77,11 @@ def cleanup(package_dir):
|
|||
|
||||
def set_variable(fp, variable, value):
|
||||
fh, temp_abs_path = tempfile.mkstemp()
|
||||
with os.fdopen(fh, 'w') as new_file, open(fp) as old_file:
|
||||
with os.fdopen(fh, "w") as new_file, open(fp) as old_file:
|
||||
for line in old_file:
|
||||
if line.startswith(variable):
|
||||
if isinstance(value, bool):
|
||||
template = '{variable} = {value}\n'
|
||||
template = "{variable} = {value}\n"
|
||||
else:
|
||||
template = '{variable} = "{value}"\n'
|
||||
new_file.write(template.format(variable=variable, value=value))
|
||||
|
@@ -91,22 +91,22 @@ def set_variable(fp, variable, value):
|
|||
os.remove(fp)
|
||||
shutil.move(temp_abs_path, fp)
|
||||
|
||||
message = 'Set {variable} in {fp} to {value}.'.format(fp=fp, variable=variable, value=value)
|
||||
message = "Set {variable} in {fp} to {value}.".format(fp=fp, variable=variable, value=value)
|
||||
print(crayons.white(message, bold=True))
|
||||
|
||||
|
||||
def update_requirements_in_zulip_repo(zulip_repo_dir, version, hash_or_tag):
|
||||
common = os.path.join(zulip_repo_dir, 'requirements', 'common.in')
|
||||
prod = os.path.join(zulip_repo_dir, 'requirements', 'prod.txt')
|
||||
dev = os.path.join(zulip_repo_dir, 'requirements', 'dev.txt')
|
||||
common = os.path.join(zulip_repo_dir, "requirements", "common.in")
|
||||
prod = os.path.join(zulip_repo_dir, "requirements", "prod.txt")
|
||||
dev = os.path.join(zulip_repo_dir, "requirements", "dev.txt")
|
||||
|
||||
def _edit_reqs_file(reqs, zulip_bots_line, zulip_line):
|
||||
fh, temp_abs_path = tempfile.mkstemp()
|
||||
with os.fdopen(fh, 'w') as new_file, open(reqs) as old_file:
|
||||
with os.fdopen(fh, "w") as new_file, open(reqs) as old_file:
|
||||
for line in old_file:
|
||||
if 'python-zulip-api' in line and 'zulip==' in line:
|
||||
if "python-zulip-api" in line and "zulip==" in line:
|
||||
new_file.write(zulip_line)
|
||||
elif 'python-zulip-api' in line and 'zulip_bots' in line:
|
||||
elif "python-zulip-api" in line and "zulip_bots" in line:
|
||||
new_file.write(zulip_bots_line)
|
||||
else:
|
||||
new_file.write(line)
|
||||
|
@@ -114,10 +114,10 @@ def update_requirements_in_zulip_repo(zulip_repo_dir, version, hash_or_tag):
|
|||
os.remove(reqs)
|
||||
shutil.move(temp_abs_path, reqs)
|
||||
|
||||
url_zulip = 'git+https://github.com/zulip/python-zulip-api.git@{tag}#egg={name}=={version}_git&subdirectory={name}\n'
|
||||
url_zulip_bots = 'git+https://github.com/zulip/python-zulip-api.git@{tag}#egg={name}=={version}+git&subdirectory={name}\n'
|
||||
zulip_bots_line = url_zulip_bots.format(tag=hash_or_tag, name='zulip_bots', version=version)
|
||||
zulip_line = url_zulip.format(tag=hash_or_tag, name='zulip', version=version)
|
||||
url_zulip = "git+https://github.com/zulip/python-zulip-api.git@{tag}#egg={name}=={version}_git&subdirectory={name}\n"
|
||||
url_zulip_bots = "git+https://github.com/zulip/python-zulip-api.git@{tag}#egg={name}=={version}+git&subdirectory={name}\n"
|
||||
zulip_bots_line = url_zulip_bots.format(tag=hash_or_tag, name="zulip_bots", version=version)
|
||||
zulip_line = url_zulip.format(tag=hash_or_tag, name="zulip", version=version)
|
||||
|
||||
_edit_reqs_file(prod, zulip_bots_line, zulip_line)
|
||||
_edit_reqs_file(dev, zulip_bots_line, zulip_line)
|
||||
|
@@ -127,11 +127,11 @@ def update_requirements_in_zulip_repo(zulip_repo_dir, version, hash_or_tag):
|
|||
|
||||
_edit_reqs_file(
|
||||
common,
|
||||
editable_zulip_bots.format(tag=hash_or_tag, name='zulip_bots', version=version),
|
||||
editable_zulip.format(tag=hash_or_tag, name='zulip', version=version),
|
||||
editable_zulip_bots.format(tag=hash_or_tag, name="zulip_bots", version=version),
|
||||
editable_zulip.format(tag=hash_or_tag, name="zulip", version=version),
|
||||
)
|
||||
|
||||
message = 'Updated zulip API package requirements in the main repo.'
|
||||
message = "Updated zulip API package requirements in the main repo."
|
||||
print(crayons.white(message, bold=True))
|
||||
|
||||
|
||||
|
@@ -177,39 +177,39 @@ And you're done! Congrats!
|
|||
parser = argparse.ArgumentParser(usage=usage)
|
||||
|
||||
parser.add_argument(
|
||||
'--cleanup',
|
||||
'-c',
|
||||
action='store_true',
|
||||
"--cleanup",
|
||||
"-c",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help='Remove build directories (dist/, build/, egg-info/, etc).',
|
||||
help="Remove build directories (dist/, build/, egg-info/, etc).",
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
'--build',
|
||||
'-b',
|
||||
metavar='VERSION_NUM',
|
||||
"--build",
|
||||
"-b",
|
||||
metavar="VERSION_NUM",
|
||||
help=(
|
||||
'Build sdists and wheels for all packages with the'
|
||||
'specified version number.'
|
||||
' sdists and wheels are stored in <package_name>/dist/*.'
|
||||
"Build sdists and wheels for all packages with the"
|
||||
"specified version number."
|
||||
" sdists and wheels are stored in <package_name>/dist/*."
|
||||
),
|
||||
)
|
||||
|
||||
parser.add_argument(
|
||||
'--release',
|
||||
'-r',
|
||||
action='store_true',
|
||||
"--release",
|
||||
"-r",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help='Upload the packages to PyPA using twine.',
|
||||
help="Upload the packages to PyPA using twine.",
|
||||
)
|
||||
|
||||
subparsers = parser.add_subparsers(dest='subcommand')
|
||||
subparsers = parser.add_subparsers(dest="subcommand")
|
||||
parser_main_repo = subparsers.add_parser(
|
||||
'update-main-repo', help='Update the zulip/requirements/* in the main zulip repo.'
|
||||
"update-main-repo", help="Update the zulip/requirements/* in the main zulip repo."
|
||||
)
|
||||
parser_main_repo.add_argument('repo', metavar='PATH_TO_ZULIP_DIR')
|
||||
parser_main_repo.add_argument('version', metavar='version number of the packages')
|
||||
parser_main_repo.add_argument('--hash', metavar='COMMIT_HASH')
|
||||
parser_main_repo.add_argument("repo", metavar="PATH_TO_ZULIP_DIR")
|
||||
parser_main_repo.add_argument("version", metavar="version number of the packages")
|
||||
parser_main_repo.add_argument("--hash", metavar="COMMIT_HASH")
|
||||
|
||||
return parser.parse_args()
|
||||
|
||||
|
@@ -217,7 +217,7 @@ And you're done! Congrats!
|
|||
def main():
|
||||
options = parse_args()
|
||||
|
||||
glob_pattern = os.path.join(REPO_DIR, '*', 'setup.py')
|
||||
glob_pattern = os.path.join(REPO_DIR, "*", "setup.py")
|
||||
setup_py_files = glob.glob(glob_pattern)
|
||||
|
||||
if options.cleanup:
|
||||
|
@@ -230,30 +230,30 @@ def main():
|
|||
for package_dir in package_dirs:
|
||||
cleanup(package_dir)
|
||||
|
||||
zulip_init = os.path.join(REPO_DIR, 'zulip', 'zulip', '__init__.py')
|
||||
set_variable(zulip_init, '__version__', options.build)
|
||||
bots_setup = os.path.join(REPO_DIR, 'zulip_bots', 'setup.py')
|
||||
set_variable(bots_setup, 'ZULIP_BOTS_VERSION', options.build)
|
||||
set_variable(bots_setup, 'IS_PYPA_PACKAGE', True)
|
||||
botserver_setup = os.path.join(REPO_DIR, 'zulip_botserver', 'setup.py')
|
||||
set_variable(botserver_setup, 'ZULIP_BOTSERVER_VERSION', options.build)
|
||||
zulip_init = os.path.join(REPO_DIR, "zulip", "zulip", "__init__.py")
|
||||
set_variable(zulip_init, "__version__", options.build)
|
||||
bots_setup = os.path.join(REPO_DIR, "zulip_bots", "setup.py")
|
||||
set_variable(bots_setup, "ZULIP_BOTS_VERSION", options.build)
|
||||
set_variable(bots_setup, "IS_PYPA_PACKAGE", True)
|
||||
botserver_setup = os.path.join(REPO_DIR, "zulip_botserver", "setup.py")
|
||||
set_variable(botserver_setup, "ZULIP_BOTSERVER_VERSION", options.build)
|
||||
|
||||
for setup_file in setup_py_files:
|
||||
package_name = os.path.basename(os.path.dirname(setup_file))
|
||||
generate_bdist_wheel(setup_file, package_name)
|
||||
|
||||
set_variable(bots_setup, 'IS_PYPA_PACKAGE', False)
|
||||
set_variable(bots_setup, "IS_PYPA_PACKAGE", False)
|
||||
|
||||
if options.release:
|
||||
dist_dirs = glob.glob(os.path.join(REPO_DIR, '*', 'dist', '*'))
|
||||
dist_dirs = glob.glob(os.path.join(REPO_DIR, "*", "dist", "*"))
|
||||
twine_upload(dist_dirs)
|
||||
|
||||
if options.subcommand == 'update-main-repo':
|
||||
if options.subcommand == "update-main-repo":
|
||||
if options.hash:
|
||||
update_requirements_in_zulip_repo(options.repo, options.version, options.hash)
|
||||
else:
|
||||
update_requirements_in_zulip_repo(options.repo, options.version, options.version)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
tools/review (38 changed lines)
|
@@ -5,63 +5,63 @@ import sys
def exit(message: str) -> None:
print('PROBLEM!')
print("PROBLEM!")
print(message)
sys.exit(1)

def run(command: str) -> None:
print('\n>>> ' + command)
print("\n>>> " + command)
subprocess.check_call(command.split())

def check_output(command: str) -> str:
return subprocess.check_output(command.split()).decode('ascii')
return subprocess.check_output(command.split()).decode("ascii")

def get_git_branch() -> str:
command = 'git rev-parse --abbrev-ref HEAD'
command = "git rev-parse --abbrev-ref HEAD"
output = check_output(command)
return output.strip()

def check_git_pristine() -> None:
command = 'git status --porcelain'
command = "git status --porcelain"
output = check_output(command)
if output.strip():
exit('Git is not pristine:\n' + output)
exit("Git is not pristine:\n" + output)

def ensure_on_clean_master() -> None:
branch = get_git_branch()
if branch != 'master':
exit('You are still on a feature branch: %s' % (branch,))
if branch != "master":
exit("You are still on a feature branch: %s" % (branch,))
check_git_pristine()
run('git fetch upstream master')
run('git rebase upstream/master')
run("git fetch upstream master")
run("git rebase upstream/master")

def create_pull_branch(pull_id: int) -> None:
run('git fetch upstream pull/%d/head' % (pull_id,))
run('git checkout -B review-%s FETCH_HEAD' % (pull_id,))
run('git rebase upstream/master')
run('git log upstream/master.. --oneline')
run('git diff upstream/master.. --name-status')
run("git fetch upstream pull/%d/head" % (pull_id,))
run("git checkout -B review-%s FETCH_HEAD" % (pull_id,))
run("git rebase upstream/master")
run("git log upstream/master.. --oneline")
run("git diff upstream/master.. --name-status")

print()
print('PR: %d' % (pull_id,))
print(subprocess.check_output(['git', 'log', 'HEAD~..', '--pretty=format:Author: %an']))
print("PR: %d" % (pull_id,))
print(subprocess.check_output(["git", "log", "HEAD~..", "--pretty=format:Author: %an"]))

def review_pr() -> None:
try:
pull_id = int(sys.argv[1])
except Exception:
exit('please provide an integer pull request id')
exit("please provide an integer pull request id")

ensure_on_clean_master()
create_pull_branch(pull_id)

if __name__ == '__main__':
if __name__ == "__main__":
review_pr()

@ -104,54 +104,54 @@ force_include = [
parser = argparse.ArgumentParser(description="Run mypy on files tracked by git.")
parser.add_argument(
'targets',
nargs='*',
"targets",
nargs="*",
default=[],
help="""files and directories to include in the result.
If this is not specified, the current directory is used""",
)
parser.add_argument(
'-m', '--modified', action='store_true', default=False, help='list only modified files'
"-m", "--modified", action="store_true", default=False, help="list only modified files"
)
parser.add_argument(
'-a',
'--all',
dest='all',
action='store_true',
"-a",
"--all",
dest="all",
action="store_true",
default=False,
help="""run mypy on all python files, ignoring the exclude list.
This is useful if you have to find out which files fail mypy check.""",
)
parser.add_argument(
'--no-disallow-untyped-defs',
dest='disallow_untyped_defs',
action='store_false',
"--no-disallow-untyped-defs",
dest="disallow_untyped_defs",
action="store_false",
default=True,
help="""Don't throw errors when functions are not annotated""",
)
parser.add_argument(
'--scripts-only',
dest='scripts_only',
action='store_true',
"--scripts-only",
dest="scripts_only",
action="store_true",
default=False,
help="""Only type check extensionless python scripts""",
)
parser.add_argument(
'--warn-unused-ignores',
dest='warn_unused_ignores',
action='store_true',
"--warn-unused-ignores",
dest="warn_unused_ignores",
action="store_true",
default=False,
help="""Use the --warn-unused-ignores flag with mypy""",
)
parser.add_argument(
'--no-ignore-missing-imports',
dest='ignore_missing_imports',
action='store_false',
"--no-ignore-missing-imports",
dest="ignore_missing_imports",
action="store_false",
default=True,
help="""Don't use the --ignore-missing-imports flag with mypy""",
)
parser.add_argument(
'--quick', action='store_true', default=False, help="""Use the --quick flag with mypy"""
"--quick", action="store_true", default=False, help="""Use the --quick flag with mypy"""
)
args = parser.parse_args()

@ -163,10 +163,10 @@ files_dict = cast(
Dict[str, List[str]],
lister.list_files(
targets=args.targets,
ftypes=['py', 'pyi'],
ftypes=["py", "pyi"],
use_shebang=True,
modified_only=args.modified,
exclude=exclude + ['stubs'],
exclude=exclude + ["stubs"],
group_by_ftype=True,
extless_only=args.scripts_only,
),

@ -174,18 +174,18 @@ files_dict = cast(
for inpath in force_include:
try:
ext = os.path.splitext(inpath)[1].split('.')[1]
ext = os.path.splitext(inpath)[1].split(".")[1]
except IndexError:
ext = 'py' # type: str
ext = "py" # type: str
files_dict[ext].append(inpath)

pyi_files = set(files_dict['pyi'])
pyi_files = set(files_dict["pyi"])
python_files = [
fpath for fpath in files_dict['py'] if not fpath.endswith('.py') or fpath + 'i' not in pyi_files
fpath for fpath in files_dict["py"] if not fpath.endswith(".py") or fpath + "i" not in pyi_files
]

repo_python_files = OrderedDict(
[('zulip', []), ('zulip_bots', []), ('zulip_botserver', []), ('tools', [])]
[("zulip", []), ("zulip_bots", []), ("zulip_botserver", []), ("tools", [])]
)
for file_path in python_files:
repo = PurePath(file_path).parts[0]

@ -13,45 +13,45 @@ os.chdir(os.path.dirname(TOOLS_DIR))
def handle_input_and_run_tests_for_package(package_name, path_list):
parser = argparse.ArgumentParser(description="Run tests for {}.".format(package_name))
parser.add_argument(
'--coverage',
nargs='?',
"--coverage",
nargs="?",
const=True,
default=False,
help='compute test coverage (--coverage combine to combine with previous reports)',
help="compute test coverage (--coverage combine to combine with previous reports)",
)
parser.add_argument(
'--pytest', '-p', default=False, action='store_true', help="run tests with pytest"
"--pytest", "-p", default=False, action="store_true", help="run tests with pytest"
)
parser.add_argument(
'--verbose',
'-v',
"--verbose",
"-v",
default=False,
action='store_true',
help='show verbose output (with pytest)',
action="store_true",
help="show verbose output (with pytest)",
)
options = parser.parse_args()

test_session_title = ' Running tests for {} '.format(package_name)
header = test_session_title.center(shutil.get_terminal_size().columns, '#')
test_session_title = " Running tests for {} ".format(package_name)
header = test_session_title.center(shutil.get_terminal_size().columns, "#")
print(header)

if options.coverage:
import coverage

cov = coverage.Coverage(config_file="tools/.coveragerc")
if options.coverage == 'combine':
if options.coverage == "combine":
cov.load()
cov.start()

if options.pytest:
location_to_run_in = os.path.join(TOOLS_DIR, '..', *path_list)
paths_to_test = ['.']
location_to_run_in = os.path.join(TOOLS_DIR, "..", *path_list)
paths_to_test = ["."]
pytest_options = [
'-s', # show output from tests; this hides the progress bar though
'-x', # stop on first test failure
'--ff', # runs last failure first
"-s", # show output from tests; this hides the progress bar though
"-x", # stop on first test failure
"--ff", # runs last failure first
]
pytest_options += ['-v'] if options.verbose else []
pytest_options += ["-v"] if options.verbose else []
os.chdir(location_to_run_in)
result = pytest.main(paths_to_test + pytest_options)
if result != 0:

@ -32,35 +32,35 @@ the tests for xkcd and wikipedia bots):
parser = argparse.ArgumentParser(description=description)

parser.add_argument(
'bots_to_test',
metavar='bot',
nargs='*',
"bots_to_test",
metavar="bot",
nargs="*",
default=[],
help='specific bots to test (default is all)',
help="specific bots to test (default is all)",
)
parser.add_argument(
'--coverage',
nargs='?',
"--coverage",
nargs="?",
const=True,
default=False,
help='compute test coverage (--coverage combine to combine with previous reports)',
help="compute test coverage (--coverage combine to combine with previous reports)",
)
parser.add_argument('--exclude', metavar='bot', nargs='*', default=[], help='bot(s) to exclude')
parser.add_argument("--exclude", metavar="bot", nargs="*", default=[], help="bot(s) to exclude")
parser.add_argument(
'--error-on-no-init',
"--error-on-no-init",
default=False,
action="store_true",
help="whether to exit if a bot has tests which won't run due to no __init__.py",
)
parser.add_argument(
'--pytest', '-p', default=False, action='store_true', help="run tests with pytest"
"--pytest", "-p", default=False, action="store_true", help="run tests with pytest"
)
parser.add_argument(
'--verbose',
'-v',
"--verbose",
"-v",
default=False,
action='store_true',
help='show verbose output (with pytest)',
action="store_true",
help="show verbose output (with pytest)",
)
return parser.parse_args()

@ -69,8 +69,8 @@ def main():
TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
os.chdir(os.path.dirname(TOOLS_DIR))
sys.path.insert(0, TOOLS_DIR)
bots_dir = os.path.join(TOOLS_DIR, '..', 'zulip_bots/zulip_bots/bots')
glob_pattern = bots_dir + '/*/test_*.py'
bots_dir = os.path.join(TOOLS_DIR, "..", "zulip_bots/zulip_bots/bots")
glob_pattern = bots_dir + "/*/test_*.py"
test_modules = glob.glob(glob_pattern)

# get only the names of bots that have tests

@ -82,7 +82,7 @@ def main():
import coverage

cov = coverage.Coverage(config_file="tools/.coveragerc")
if options.coverage == 'combine':
if options.coverage == "combine":
cov.load()
cov.start()

@ -96,14 +96,14 @@ def main():
bots_to_test = {bot for bot in specified_bots if bot not in options.exclude}

if options.pytest:
excluded_bots = ['merels']
excluded_bots = ["merels"]
pytest_bots_to_test = sorted([bot for bot in bots_to_test if bot not in excluded_bots])
pytest_options = [
'-s', # show output from tests; this hides the progress bar though
'-x', # stop on first test failure
'--ff', # runs last failure first
"-s", # show output from tests; this hides the progress bar though
"-x", # stop on first test failure
"--ff", # runs last failure first
]
pytest_options += ['-v'] if options.verbose else []
pytest_options += ["-v"] if options.verbose else []
os.chdir(bots_dir)
result = pytest.main(pytest_bots_to_test + pytest_options)
if result != 0:

@ -142,5 +142,5 @@ def main():
print("HTML report saved under directory 'htmlcov'.")

if __name__ == '__main__':
if __name__ == "__main__":
main()

@ -2,5 +2,5 @@
from server_lib.test_handler import handle_input_and_run_tests_for_package

if __name__ == '__main__':
handle_input_and_run_tests_for_package('Botserver', ['zulip_botserver'])
if __name__ == "__main__":
handle_input_and_run_tests_for_package("Botserver", ["zulip_botserver"])

@ -2,5 +2,5 @@
from server_lib.test_handler import handle_input_and_run_tests_for_package

if __name__ == '__main__':
handle_input_and_run_tests_for_package('Bot library', ['zulip_bots', 'zulip_bots', 'tests'])
if __name__ == "__main__":
handle_input_and_run_tests_for_package("Bot library", ["zulip_bots", "zulip_bots", "tests"])

@ -2,5 +2,5 @@
from server_lib.test_handler import handle_input_and_run_tests_for_package

if __name__ == '__main__':
handle_input_and_run_tests_for_package('API', ['zulip'])
if __name__ == "__main__":
handle_input_and_run_tests_for_package("API", ["zulip"])