---
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
-   rev: v3.1.0
+   rev: v3.4.0
    hooks:
      - id: trailing-whitespace
  - repo: https://github.com/ambv/black
-   rev: 19.10b0
+   rev: 21.6b0
    hooks:
      - id: black
  - repo: https://github.com/jorisroovers/gitlint
-   rev: v0.13.1
+   rev: v0.15.1
    hooks:
      - id: gitlint
access_str = "changes/{}/edit:publish".format(changeid)
headers = {"Content-Type": "application/json; charset=UTF-8"}
self.r.headers.update(headers)
- payload = json.dumps({"notify": "NONE",})
+ payload = json.dumps(
+     {
+         "notify": "NONE",
+     }
+ )
result = self.post(access_str, data=payload)
return result
##############################################################
access_str = "changes/{}/edit:publish".format(changeid)
headers = {"Content-Type": "application/json; charset=UTF-8"}
self.r.headers.update(headers)
- payload = json.dumps({"notify": "NONE",})
+ payload = json.dumps(
+     {
+         "notify": "NONE",
+     }
+ )
result = self.post(access_str, data=payload)
log.info(result)
return result
headers = {"Content-Type": "application/json; charset=UTF-8"}
self.r.headers.update(headers)
payload = json.dumps(
- {"tag": "automation", "message": "Vote on file", "labels": {"Verified": +1, "Code-Review": +2,}}
+ {
+ "tag": "automation",
+ "message": "Vote on file",
+ "labels": {
+ "Verified": +1,
+ "Code-Review": +2,
+ },
+ }
)
result = self.post(access_str, data=payload)
else:
subject = "Automation adds {0}\n\nSigned-off-by: {1}".format(filename, signed_off_by)
payload = json.dumps(
- {"project": "{}".format(gerrit_project), "subject": "{}".format(subject), "branch": "master",}
+ {
+ "project": "{}".format(gerrit_project),
+ "subject": "{}".format(subject),
+ "branch": "master",
+ }
)
return payload
access_str = "changes/{}/edit:publish".format(changeid)
headers = {"Content-Type": "application/json; charset=UTF-8"}
self.r.headers.update(headers)
- payload = json.dumps({"notify": "NONE",})
+ payload = json.dumps(
+     {
+         "notify": "NONE",
+     }
+ )
result = self.post(access_str, data=payload)
log.info(result)
)
if repo_provider == "site":
data["data"].update(
- {"repoPolicy": "MIXED", "writePolicy": "ALLOW_WRITE", "indexable": False,}
+ {
+ "repoPolicy": "MIXED",
+ "writePolicy": "ALLOW_WRITE",
+ "indexable": False,
+ }
)
if repo_type == "proxy":
list_of_privileges = []
for privilege in result:
list_of_privileges.append(
- [privilege["type"], privilege["name"], privilege["description"], privilege["readOnly"],]
+ [
+ privilege["type"],
+ privilege["name"],
+ privilege["description"],
+ privilege["readOnly"],
+ ]
)
return list_of_privileges
list_of_tasks = []
for task in result:
list_of_tasks.append(
- [task["name"], task["message"], task["currentState"], task["lastRunResult"],]
+ [
+ task["name"],
+ task["message"],
+ task["currentState"],
+ task["lastRunResult"],
+ ]
)
return list_of_tasks
"repository": repository,
}
json_data = json.dumps(data)
- result = self.get("v1/search/assets?q={}&repository={}".format(query, repository), data=json_data,)[1]["items"]
+ result = self.get("v1/search/assets?q={}&repository={}".format(query, repository), data=json_data,)[
+     1
+ ]["items"]
list_of_assets = []
if details:
"""List tasks."""
r = ctx.obj["nexus3"]
data = r.list_tasks()
- log.info(tabulate(data, headers=["Name", "Message", "Current State", "Last Run Result"],))
+ log.info(
+     tabulate(
+         data,
+         headers=["Name", "Message", "Current State", "Last Run Result"],
+     )
+ )
"""Search users."""
r = ctx.obj["nexus3"]
data = r.list_user(username)
- log.info(tabulate(data, headers=["User ID", "First Name", "Last Name", "Email Address", "Status", "Roles",],))
+ log.info(
+     tabulate(
+         data,
+         headers=[
+             "User ID",
+             "First Name",
+             "Last Name",
+             "Email Address",
+             "Status",
+             "Roles",
+         ],
+     )
+ )
@user.command(name="create")
log.info(pformat(data))
-@click.command(name="project-update", context_settings=dict(ignore_unknown_options=True, allow_extra_args=True,))
+@click.command(
+ name="project-update",
+ context_settings=dict(
+ ignore_unknown_options=True,
+ allow_extra_args=True,
+ ),
+)
@click.argument("project-name")
@click.pass_context
def project_update(ctx, project_name):
"""Create a target with the given patterns."""
url = os.path.join(self.baseurl, "repo_targets")
- target = {"data": {"contentClass": "any", "patterns": patterns, "name": name,}}
+ target = {
+ "data": {
+ "contentClass": "any",
+ "patterns": patterns,
+ "name": name,
+ }
+ }
json_data = json.dumps(target).encode(encoding="latin-1")
"data": {
"name": name,
"description": name,
- "method": [priv,],
+ "method": [
+     priv,
+ ],
"repositoryGroupId": "",
"repositoryId": "",
"repositoryTargetId": target_id,
"email": "{}-deploy@{}".format(name, domain),
"firstName": name,
"lastName": "Deployment",
- "roles": [role_id, "nx-deployment",],
+ "roles": [
+     role_id,
+     "nx-deployment",
+ ],
"password": password,
"status": "active",
}
import lftools.deploy as deploy_sys
-FIXTURE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures",)
+FIXTURE_DIR = os.path.join(
+     os.path.dirname(os.path.realpath(__file__)),
+     "fixtures",
+)
def test_log_and_exit():
assert excinfo.type == SystemExit
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "deploy"),
+)
def test_copy_archive_dir(cli_runner, datafiles):
"""Test copy_archives() command to ensure archives dir is copied."""
os.chdir(str(datafiles))
assert os.path.exists(os.path.join(stage_dir, "test.log"))
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "deploy"),
+)
def test_copy_archive_pattern(cli_runner, datafiles):
"""Test copy_archives() command to ensure glob patterns are copied."""
os.chdir(str(datafiles))
)
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "deploy"),
+)
def test_deploy_archive(cli_runner, datafiles, responses):
"""Test deploy_archives() command for expected upload cases."""
os.chdir(str(datafiles))
assert result.exit_code == 1
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "deploy"),
+)
def test_deploy_archive2(datafiles):
"""Test deploy_archives() command when archives dir is missing."""
os.chdir(str(datafiles))
assert workspace_dir in str(excinfo.value)
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "deploy"),
+)
def test_deploy_archive3(datafiles):
"""Test deploy_archives() command when archives dir is a file instead of a dir."""
os.chdir(str(datafiles))
assert workspace_dir in str(excinfo.value)
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "deploy"),
+)
def test_deploy_archive4(cli_runner, datafiles, responses):
"""Test deploy_archives() command when using duplicated patterns."""
os.chdir(str(datafiles))
assert deploy_sys._remove_duplicates_and_sort(tst[0]) == tst[1]
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "deploy"),
+)
def test_deploy_logs(cli_runner, datafiles, responses):
"""Test deploy_logs() command for expected upload cases."""
os.chdir(str(datafiles))
assert result.exit_code == 0
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "deploy"),
+)
def test_deploy_nexus_zip(cli_runner, datafiles, responses):
os.chdir(str(datafiles))
nexus_url = "https://nexus.example.org"
# Test success
success_upload_url = "{}/service/local/repositories/{}/content-compressed/{}".format(
-     nexus_url, nexus_repo, nexus_path,
+     nexus_url,
+     nexus_repo,
+     nexus_path,
)
responses.add(responses.POST, success_upload_url, status=201)
result = cli_runner.invoke(
</body>
</html>
"""
- upload_404_url = "{}/service/local/repositories/{}/content-compressed/{}".format(nexus_url, "logs2", nexus_path,)
+ upload_404_url = "{}/service/local/repositories/{}/content-compressed/{}".format(
+     nexus_url,
+     "logs2",
+     nexus_path,
+ )
responses.add(responses.POST, upload_404_url, body=upload_404, status=404)
result = cli_runner.invoke(
cli.cli,
assert "Something went wrong" in str(excinfo.value)
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "deploy"),
+)
def test_deploy_nexus_snapshot(datafiles, responses):
"""Test deploy_nexus with snapshot.
deploy_sys.deploy_nexus(nexus_url, deploy_dir, snapshot)
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "deploy"),
+)
def test_deploy_nexus_nosnapshot(datafiles, responses):
"""Test deploy_nexus with no snapshot.
deploy_sys.deploy_nexus(nexus_url, deploy_dir)
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "deploy"),
+)
def test_deploy_nexus_stage(datafiles, responses):
"""Test deploy_nexus_stage."""
url = "http://valid.deploy.stage"
from lftools import cli
-FIXTURE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures",)
+FIXTURE_DIR = os.path.join(
+     os.path.dirname(os.path.realpath(__file__)),
+     "fixtures",
+)
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "license"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "license"),
+)
def test_check_license(cli_runner, datafiles):
"""Test check_license() command."""
os.chdir(str(datafiles))
assert result.exit_code == 1
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "license"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "license"),
+)
def test_check_license_directory(cli_runner, datafiles):
"""Test check_license_directory() command."""
os.chdir(str(datafiles))
from lftools import cli
import lftools.nexus.release_docker_hub as rdh
-FIXTURE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures",)
+FIXTURE_DIR = os.path.join(
+     os.path.dirname(os.path.realpath(__file__)),
+     "fixtures",
+)
def test_remove_http_from_url():
assert tags.repository_exist == True
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "nexus"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "nexus"),
+)
class TestTagsRegExpClass:
"""Test Tags and Regexp for Versions.
assert self.counter.cleanup == 90
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "nexus"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "nexus"),
+)
class TestFetchNexus3Catalog:
url = "https://nexus3.onap.org:10002/v2/_catalog"
creds = {"authtype": "token", "endpoint": "https://readthedocs.org/api/v3/", "token": "xyz"}
rtd = client.ReadTheDocs(creds=creds)
-FIXTURE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures",)
+FIXTURE_DIR = os.path.join(
+     os.path.dirname(os.path.realpath(__file__)),
+     "fixtures",
+)
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "rtd"),
+)
@responses.activate
def test_project_list(datafiles):
os.chdir(str(datafiles))
assert "TestProject1" in rtd.project_list()
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "rtd"),
+)
@responses.activate
def test_project_details(datafiles):
os.chdir(str(datafiles))
assert "slug" in rtd.project_details("TestProject1")
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "rtd"),
+)
@responses.activate
def test_project_version_list(datafiles):
os.chdir(str(datafiles))
assert "test-trigger6" in rtd.project_version_list("TestProject1")
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "rtd"),
+)
@responses.activate
def test_project_version_details(datafiles):
os.chdir(str(datafiles))
)
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "rtd"),
+)
@responses.activate
def test_project_build_list(datafiles):
os.chdir(str(datafiles))
assert "success" in rtd.project_build_list("testproject1")
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "rtd"),
+)
@responses.activate
def test_project_build_details(datafiles):
os.chdir(str(datafiles))
assert rtd.project_build_trigger("testproject1", "latest")
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "rtd"),
+)
@responses.activate
def test_subproject_list(datafiles):
os.chdir(str(datafiles))
assert "testproject2" in rtd.subproject_list("TestProject1")
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "rtd"),
+)
@responses.activate
def test_subproject_details(datafiles):
os.chdir(str(datafiles))
from lftools import cli
-FIXTURE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures",)
+FIXTURE_DIR = os.path.join(
+     os.path.dirname(os.path.realpath(__file__)),
+     "fixtures",
+)
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "schema"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "schema"),
+)
def test_check_license(cli_runner, datafiles):
"""Test check_schema() command."""
os.chdir(str(datafiles))
from lftools import cli
-FIXTURE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures",)
+FIXTURE_DIR = os.path.join(
+     os.path.dirname(os.path.realpath(__file__)),
+     "fixtures",
+)
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "version_bump"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "version_bump"),
+)
def test_version_bump(cli_runner, datafiles):
"""Test version bump command."""
os.chdir(str(datafiles))
assert filecmp.cmp(pom, expected_pom)
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "version_release"),)
+@pytest.mark.datafiles(
+ os.path.join(FIXTURE_DIR, "version_release"),
+)
def test_version_release(cli_runner, datafiles):
"""Test version release command."""
os.chdir(str(datafiles))