This patch auto-formats most files in the lftools codebase.
The changes are trivial, such as replacing single quotes with
double quotes and rewrapping overlong lines. lftools will use
Black for code formatting once this patch and the test harness
overhaul patch are merged.
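
For reference, a typical change looks like this (an illustrative
pair, not taken from a specific file in this patch):

    # before
    headers = {'Content-Type': 'text/plain'}
    # after
    headers = {"Content-Type": "text/plain"}

The formatting below is consistent with a plain Black run; the
exact line length is an assumption inferred from the reflowed
lines in this patch:

    black --line-length 120 .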
Issue-ID: RELENG-2766
Signed-off-by: DW Talton <dtalton@contractor.linuxfoundation.org>
Change-Id: I74318e0685aa44617af4575016c84caafa68b5c5
import os
import sys
import sphinx_bootstrap_theme
-sys.path.insert(0, os.path.abspath('..'))
+
+sys.path.insert(0, os.path.abspath(".."))
from pbr.version import VersionInfo
from docs_conf.conf import *
-version = str(VersionInfo('lftools'))
-release = str(VersionInfo('lftools'))
+version = str(VersionInfo("lftools"))
+release = str(VersionInfo("lftools"))
# Linkcheck Options
-linkcheck_ignore= [
- r'https://testing\.googleblog\.com/.*',
- r'https://nexus\.example\.com',
+linkcheck_ignore = [
+ r"https://testing\.googleblog\.com/.*",
+ r"https://nexus\.example\.com",
]
##############################################################################
"""lftools package."""
-__author__ = 'Thanh Ha'
-__summary__ = 'Linux Foundation Release Engineering Tools'
+__author__ = "Thanh Ha"
+__summary__ = "Linux Foundation Release Engineering Tools"
import logging
import sys
class LogFormatter(logging.Formatter):
"""Custom log formatter."""
- default_fmt = logging.Formatter('%(levelname)s: %(message)s')
- debug_fmt = logging.Formatter(
- '%(levelname)s: %(name)s:%(lineno)d: %(message)s')
- info_fmt = logging.Formatter('%(message)s')
+ default_fmt = logging.Formatter("%(levelname)s: %(message)s")
+ debug_fmt = logging.Formatter("%(levelname)s: %(name)s:%(lineno)d: %(message)s")
+ info_fmt = logging.Formatter("%(message)s")
def format(self, record):
"""Format log messages depending on log level."""
self.password = self.creds["password"]
self.r = requests.Session()
self.r.auth = (self.username, self.password)
- self.r.headers.update({
- "Content-Type": "application/json; charset=UTF-8",
- "Accept": "application/json"
- })
+ self.r.headers.update({"Content-Type": "application/json; charset=UTF-8", "Accept": "application/json"})
if self.creds["authtype"] == "token":
self.token = self.creds["token"]
self.r = requests.Session()
- self.r.headers.update(
- {"Authorization": "Token {}".format(self.token)}
- )
+ self.r.headers.update({"Authorization": "Token {}".format(self.token)})
self.r.headers.update({"Content-Type": "application/json"})
def _request(self, url, method, data=None, timeout=30):
"""Execute the request."""
- resp = self.r.request(method,
- self.endpoint + url,
- data=data,
- timeout=timeout)
+ resp = self.r.request(method, self.endpoint + url, data=data, timeout=timeout)
# Some massaging to make our gerrit python code work
if resp.status_code == 409:
def __init__(self, **params):
"""Initialize the class."""
self.params = params
- self.fqdn = self.params['fqdn']
- if 'creds' not in self.params:
+ self.fqdn = self.params["fqdn"]
+ if "creds" not in self.params:
creds = {
- 'authtype': 'basic',
- 'username': config.get_setting(self.fqdn, 'username'),
- 'password': config.get_setting(self.fqdn, 'password'),
- 'endpoint': config.get_setting(self.fqdn, 'endpoint')
+ "authtype": "basic",
+ "username": config.get_setting(self.fqdn, "username"),
+ "password": config.get_setting(self.fqdn, "password"),
+ "endpoint": config.get_setting(self.fqdn, "endpoint"),
}
- params['creds'] = creds
+ params["creds"] = creds
super(Gerrit, self).__init__(**params)
filename /tmp/INFO.yaml
file_location="somedir/example-INFO.yaml"
"""
- signed_off_by = config.get_setting(fqdn, 'sob')
+ signed_off_by = config.get_setting(fqdn, "sob")
basename = os.path.basename(filename)
payload = self.create_change(basename, gerrit_project, issue_id, signed_off_by)
if file_location:
- file_location = urllib.parse.quote(file_location, safe='', encoding=None, errors=None)
+ file_location = urllib.parse.quote(file_location, safe="", encoding=None, errors=None)
basename = file_location
log.info(payload)
- access_str = 'changes/'
+ access_str = "changes/"
result = self.post(access_str, data=payload)[1]
- log.info(result['id'])
- changeid = (result['id'])
+ log.info(result["id"])
+ changeid = result["id"]
my_file = open(filename)
my_file_size = os.stat(filename)
- headers = {'Content-Type': 'text/plain',
- 'Content-length': '{}'.format(my_file_size)}
+ headers = {"Content-Type": "text/plain", "Content-length": "{}".format(my_file_size)}
self.r.headers.update(headers)
- access_str = 'changes/{}/edit/{}'.format(changeid, basename)
+ access_str = "changes/{}/edit/{}".format(changeid, basename)
payload = my_file
result = self.put(access_str, data=payload)
log.info(result)
- access_str = 'changes/{}/edit:publish'.format(changeid)
- headers = {'Content-Type': 'application/json; charset=UTF-8'}
+ access_str = "changes/{}/edit:publish".format(changeid)
+ headers = {"Content-Type": "application/json; charset=UTF-8"}
self.r.headers.update(headers)
- payload = json.dumps({
- "notify": "NONE",
- })
+ payload = json.dumps({"notify": "NONE",})
result = self.post(access_str, data=payload)
return result
##############################################################
"""
###############################################################
# Setup
- signed_off_by = config.get_setting(fqdn, 'sob')
+ signed_off_by = config.get_setting(fqdn, "sob")
gerrit_project_dashed = gerrit_project.replace("/", "-")
- gerrit_project_encoded = urllib.parse.quote(gerrit_project, safe='', encoding=None, errors=None)
- filename = 'info-{}.yaml'.format(gerrit_project_dashed)
+ gerrit_project_encoded = urllib.parse.quote(gerrit_project, safe="", encoding=None, errors=None)
+ filename = "info-{}.yaml".format(gerrit_project_dashed)
if not reviewid:
payload = self.create_change(filename, jjbrepo, issue_id, signed_off_by)
log.info(payload)
- access_str = 'changes/'
+ access_str = "changes/"
result = self.post(access_str, data=payload)[1]
log.info(result)
- log.info(result['id'])
- changeid = (result['id'])
+ log.info(result["id"])
+ changeid = result["id"]
else:
changeid = reviewid
jobs:
- gerrit-info-yaml-verify
project: {1}
- branch: master\n""".format(gerrit_project_dashed, gerrit_project)
- my_inline_file_size = len(my_inline_file.encode('utf-8'))
- headers = {'Content-Type': 'text/plain',
- 'Content-length': '{}'.format(my_inline_file_size)}
+ branch: master\n""".format(
+ gerrit_project_dashed, gerrit_project
+ )
+ my_inline_file_size = len(my_inline_file.encode("utf-8"))
+ headers = {"Content-Type": "text/plain", "Content-length": "{}".format(my_inline_file_size)}
self.r.headers.update(headers)
- access_str = 'changes/{0}/edit/jjb%2F{1}%2Finfo-{2}.yaml'.format(
- changeid, gerrit_project_encoded, gerrit_project_dashed)
+ access_str = "changes/{0}/edit/jjb%2F{1}%2Finfo-{2}.yaml".format(
+ changeid, gerrit_project_encoded, gerrit_project_dashed
+ )
payload = my_inline_file
log.info(access_str)
result = self.put(access_str, data=payload)
log.info(result)
- access_str = 'changes/{}/edit:publish'.format(changeid)
- headers = {'Content-Type': 'application/json; charset=UTF-8'}
+ access_str = "changes/{}/edit:publish".format(changeid)
+ headers = {"Content-Type": "application/json; charset=UTF-8"}
self.r.headers.update(headers)
- payload = json.dumps({
- "notify": "NONE",
- })
+ payload = json.dumps({"notify": "NONE",})
result = self.post(access_str, data=payload)
log.info(result)
- return(result)
+ return result
def vote_on_change(self, fqdn, gerrit_project, changeid, **kwargs):
"""Helper that votes on a change.
POST /changes/{change-id}/revisions/{revision-id}/review
"""
log.info(fqdn, gerrit_project, changeid)
- access_str = 'changes/{}/revisions/2/review'.format(changeid)
- headers = {'Content-Type': 'application/json; charset=UTF-8'}
+ access_str = "changes/{}/revisions/2/review".format(changeid)
+ headers = {"Content-Type": "application/json; charset=UTF-8"}
self.r.headers.update(headers)
- payload = json.dumps({
- "tag": "automation",
- "message": "Vote on file",
- "labels": {
- "Verified": +1,
- "Code-Review": +2,
- }
- })
+ payload = json.dumps(
+ {"tag": "automation", "message": "Vote on file", "labels": {"Verified": +1, "Code-Review": +2,}}
+ )
result = self.post(access_str, data=payload)
# Code for projects that don't allow self merge.
- if config.get_setting(self.fqdn + '.second'):
- second_username = config.get_setting(self.fqdn + '.second', 'username')
- second_password = config.get_setting(self.fqdn + '.second', 'password')
+ if config.get_setting(self.fqdn + ".second"):
+ second_username = config.get_setting(self.fqdn + ".second", "username")
+ second_password = config.get_setting(self.fqdn + ".second", "password")
self.r.auth = (second_username, second_password)
result = self.post(access_str, data=payload)
self.r.auth = (self.username, self.password)
def submit_change(self, fqdn, gerrit_project, changeid, payload, **kwargs):
"""Method so submit a change."""
# submit a change id
- access_str = 'changes/{}/submit'.format(changeid)
+ access_str = "changes/{}/submit".format(changeid)
log.info(access_str)
- headers = {'Content-Type': 'application/json; charset=UTF-8'}
+ headers = {"Content-Type": "application/json; charset=UTF-8"}
self.r.headers.update(headers)
result = self.post(access_str, data=payload)
return result
def abandon_changes(self, fqdn, gerrit_project, **kwargs):
"""."""
- gerrit_project_encoded = urllib.parse.quote(gerrit_project, safe='', encoding=None, errors=None)
- access_str = 'changes/?q=project:{}'.format(gerrit_project_encoded)
+ gerrit_project_encoded = urllib.parse.quote(gerrit_project, safe="", encoding=None, errors=None)
+ access_str = "changes/?q=project:{}".format(gerrit_project_encoded)
log.info(access_str)
- headers = {'Content-Type': 'application/json; charset=UTF-8'}
+ headers = {"Content-Type": "application/json; charset=UTF-8"}
self.r.headers.update(headers)
result = self.get(access_str)[1]
- payload = {'message': 'Abandoned by automation'}
+ payload = {"message": "Abandoned by automation"}
for id in result:
- if (id['status']) == "NEW":
- id = (id['id'])
- access_str = 'changes/{}/abandon'.format(id)
+ if (id["status"]) == "NEW":
+ id = id["id"]
+ access_str = "changes/{}/abandon".format(id)
log.info(access_str)
result = self.post(access_str, data=payload)[1]
return result
def create_change(self, filename, gerrit_project, issue_id, signed_off_by, **kwargs):
"""Method to create a gerrit change."""
if issue_id:
- subject = (
- 'Automation adds {0}\n\nIssue-ID: {1}\n\nSigned-off-by: {2}'.format(filename, issue_id, signed_off_by))
+ subject = "Automation adds {0}\n\nIssue-ID: {1}\n\nSigned-off-by: {2}".format(
+ filename, issue_id, signed_off_by
+ )
else:
- subject = (
- 'Automation adds {0}\n\nSigned-off-by: {1}'.format(filename, signed_off_by))
- payload = json.dumps({
- "project": '{}'.format(gerrit_project),
- "subject": '{}'.format(subject),
- "branch": 'master',
- })
+ subject = "Automation adds {0}\n\nSigned-off-by: {1}".format(filename, signed_off_by)
+ payload = json.dumps(
+ {"project": "{}".format(gerrit_project), "subject": "{}".format(subject), "branch": "master",}
+ )
return payload
def sanity_check(self, fqdn, gerrit_project, **kwargs):
"""Preform a sanity check."""
# Sanity check
- gerrit_project_encoded = urllib.parse.quote(gerrit_project, safe='', encoding=None, errors=None)
- mylist = ['projects/', 'projects/{}'.format(gerrit_project_encoded)]
+ gerrit_project_encoded = urllib.parse.quote(gerrit_project, safe="", encoding=None, errors=None)
+ mylist = ["projects/", "projects/{}".format(gerrit_project_encoded)]
for access_str in mylist:
log.info(access_str)
try:
gerrit_project test/test1
issue_id: CIMAN-33
"""
- signed_off_by = config.get_setting(fqdn, 'sob')
+ signed_off_by = config.get_setting(fqdn, "sob")
self.sanity_check(fqdn, gerrit_project)
###############################################################
payload = self.create_change(filename, gerrit_project, issue_id, signed_off_by)
log.info(payload)
- access_str = 'changes/'
+ access_str = "changes/"
result = self.post(access_str, data=payload)[1]
log.info(result)
- changeid = (result['id'])
+ changeid = result["id"]
###############################################################
# Add a file to a change set.
port=29418
project={1}
defaultbranch=master
- """.format(fqdn, gerrit_project)
- my_inline_file_size = len(my_inline_file.encode('utf-8'))
- headers = {'Content-Type': 'text/plain',
- 'Content-length': '{}'.format(my_inline_file_size)}
+ """.format(
+ fqdn, gerrit_project
+ )
+ my_inline_file_size = len(my_inline_file.encode("utf-8"))
+ headers = {"Content-Type": "text/plain", "Content-length": "{}".format(my_inline_file_size)}
self.r.headers.update(headers)
- access_str = 'changes/{}/edit/{}'.format(changeid, filename)
+ access_str = "changes/{}/edit/{}".format(changeid, filename)
payload = my_inline_file
result = self.put(access_str, data=payload)
exit(0)
else:
- access_str = 'changes/{}/edit:publish'.format(changeid)
- headers = {'Content-Type': 'application/json; charset=UTF-8'}
+ access_str = "changes/{}/edit:publish".format(changeid)
+ headers = {"Content-Type": "application/json; charset=UTF-8"}
self.r.headers.update(headers)
- payload = json.dumps({
- "notify": "NONE",
- })
+ payload = json.dumps({"notify": "NONE",})
result = self.post(access_str, data=payload)
log.info(result)
###############################################################
# Github Rights
- gerrit_project_encoded = urllib.parse.quote(gerrit_project, safe='', encoding=None, errors=None)
+ gerrit_project_encoded = urllib.parse.quote(gerrit_project, safe="", encoding=None, errors=None)
# GET /groups/?m=test%2F HTTP/1.0
- access_str = 'groups/?m=GitHub%20Replication'
+ access_str = "groups/?m=GitHub%20Replication"
log.info(access_str)
result = self.get(access_str)[1]
time.sleep(5)
- githubid = (result['GitHub Replication']['id'])
+ githubid = result["GitHub Replication"]["id"]
log.info(githubid)
# POST /projects/MyProject/access HTTP/1.0
if githubid:
- payload = json.dumps({
- "add": {
- "refs/*": {
- "permissions": {
- "read": {
- "rules": {
- "{}".format(githubid): {
- "action": "{}".format("ALLOW")
- }}}}}}
- })
- access_str = 'projects/{}/access'.format(gerrit_project_encoded)
+ payload = json.dumps(
+ {
+ "add": {
+ "refs/*": {
+ "permissions": {
+ "read": {"rules": {"{}".format(githubid): {"action": "{}".format("ALLOW")}}}
+ }
+ }
+ }
+ }
+ )
+ access_str = "projects/{}/access".format(gerrit_project_encoded)
result = self.post(access_str, data=payload)[1]
pretty = json.dumps(result, indent=4, sort_keys=True)
log.info(pretty)
--description="This is a demo project"
"""
- gerrit_project = urllib.parse.quote(gerrit_project, safe='', encoding=None, errors=None)
+ gerrit_project = urllib.parse.quote(gerrit_project, safe="", encoding=None, errors=None)
- access_str = 'projects/{}'.format(gerrit_project)
+ access_str = "projects/{}".format(gerrit_project)
result = self.get(access_str)[0]
if result.status_code == 404:
ldapgroup = "ldap:cn={},ou=Groups,dc=freestandards,dc=org".format(ldap_group)
log.info(ldapgroup)
- access_str = 'projects/{}'.format(gerrit_project)
- payload = json.dumps({
- "description": "{}".format(description),
- "submit_type": "INHERIT",
- "create_empty_commit": "True",
- "owners": [
- "{}".format(ldapgroup)
- ]
- })
+ access_str = "projects/{}".format(gerrit_project)
+ payload = json.dumps(
+ {
+ "description": "{}".format(description),
+ "submit_type": "INHERIT",
+ "create_empty_commit": "True",
+ "owners": ["{}".format(ldapgroup)],
+ }
+ )
log.info(payload)
result = self.put(access_str, data=payload)
def list_project_permissions(self, project):
"""List a projects owners."""
- result = self.get('access/?project={}'.format(project))[1][project]['local']
+ result = self.get("access/?project={}".format(project))[1][project]["local"]
group_list = []
for k, v in result.items():
- for kk, vv in result[k]['permissions']['owner']['rules'].items():
- group_list.append(kk.replace('ldap:cn=', '').replace(',ou=Groups,dc=freestandards,dc=org', ''))
+ for kk, vv in result[k]["permissions"]["owner"]["rules"].items():
+ group_list.append(kk.replace("ldap:cn=", "").replace(",ou=Groups,dc=freestandards,dc=org", ""))
return group_list
def list_project_inherits_from(self, gerrit_project):
"""List who a project inherits from."""
- gerrit_project = urllib.parse.quote(gerrit_project, safe='', encoding=None, errors=None)
- result = self.get('projects/{}/access'.format(gerrit_project))[1]
- inherits = (result['inherits_from']['id'])
+ gerrit_project = urllib.parse.quote(gerrit_project, safe="", encoding=None, errors=None)
+ result = self.get("projects/{}/access".format(gerrit_project))[1]
+ inherits = result["inherits_from"]["id"]
return inherits
"""Nexus2 REST API interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import json
import logging
super(Nexus2, self).__init__(**params)
-############
-# Privileges
+ ############
+ # Privileges
def privilege_list(self):
"""List privileges."""
- result = self.get('service/local/privileges')[1]['data']
+ result = self.get("service/local/privileges")[1]["data"]
privilege_list = []
for privilege in result:
- privilege_list.append([
- privilege['name'],
- privilege['id']
- ])
+ privilege_list.append([privilege["name"], privilege["id"]])
privilege_list.sort()
return privilege_list
:param repo: the repo to attach to the privilege
"""
data = {
- 'data': {
- 'name': name,
- 'description': description,
- 'type': 'target',
- 'repositoryTargetId': 'any',
- 'repositoryId': repo,
- 'repositoryGroupId': '',
- 'method': ['create', 'read', 'update', 'delete']
+ "data": {
+ "name": name,
+ "description": description,
+ "type": "target",
+ "repositoryTargetId": "any",
+ "repositoryId": repo,
+ "repositoryGroupId": "",
+ "method": ["create", "read", "update", "delete"],
}
}
json_data = json.dumps(data)
- result = self.post('service/local/privileges_target', data=json_data)
+ result = self.post("service/local/privileges_target", data=json_data)
if result[0].status_code == 201:
- return 'Privilege successfully created.'
+ return "Privilege successfully created."
def privilege_delete(self, privilege_id):
"""Delete a privilege.
:param privilege_id: the ID of the privilege (from privilege list)
"""
- result = self.delete('service/local/privileges/{}'.format(privilege_id))
+ result = self.delete("service/local/privileges/{}".format(privilege_id))
if result.status_code == 204:
return "Privilege successfully deleted."
-##############
-# Repositories
+ ##############
+ # Repositories
def repo_list(self):
"""Get a list of repositories."""
- result = self.get('service/local/repositories')[1]['data']
+ result = self.get("service/local/repositories")[1]["data"]
repo_list = []
for repo in result:
- repo_list.append([
- repo['name'],
- repo['repoType'],
- repo['provider'],
- repo['id']
- ])
+ repo_list.append([repo["name"], repo["repoType"], repo["provider"], repo["id"]])
return repo_list
"provider": repo_provider,
"providerRole": "org.sonatype.nexus.proxy.repository.Repository",
"repoPolicy": repo_policy,
- 'repoType': repo_type
+ "repoType": repo_type,
}
}
if repo_type == "hosted":
- data['data'].update({
- "checksumPolicy": "IGNORE",
- "downloadRemoteIndexes": False,
- "writePolicy": "ALLOW_WRITE_ONCE"
- })
- if repo_provider == 'site':
- data['data'].update({
- 'repoPolicy': 'MIXED',
- 'writePolicy': 'ALLOW_WRITE',
- 'indexable': False,
- })
-
- if repo_type == 'proxy':
- data['data'].update({
- 'artifactMaxAge': -1,
- 'autoBlockActive': True,
- "checksumPolicy": "WARN",
- "downloadRemoteIndexes": True,
- 'fileTypeValidation': True,
- 'metadataMaxAge': 1440,
- 'remoteStorage': {
- 'authentication': None,
- 'connectionSettings': None,
- 'remoteStorageUrl': repo_upstream_url
+ data["data"].update(
+ {"checksumPolicy": "IGNORE", "downloadRemoteIndexes": False, "writePolicy": "ALLOW_WRITE_ONCE"}
+ )
+ if repo_provider == "site":
+ data["data"].update(
+ {"repoPolicy": "MIXED", "writePolicy": "ALLOW_WRITE", "indexable": False,}
+ )
+
+ if repo_type == "proxy":
+ data["data"].update(
+ {
+ "artifactMaxAge": -1,
+ "autoBlockActive": True,
+ "checksumPolicy": "WARN",
+ "downloadRemoteIndexes": True,
+ "fileTypeValidation": True,
+ "metadataMaxAge": 1440,
+ "remoteStorage": {
+ "authentication": None,
+ "connectionSettings": None,
+ "remoteStorageUrl": repo_upstream_url,
+ },
}
- })
+ )
json_data = json.dumps(data)
result = self.post("service/local/repositories", data=json_data)
:param repo_id: the ID of the repo from repo list.
"""
- result = self.delete('service/local/repositories/{}'.format(repo_id))
+ result = self.delete("service/local/repositories/{}".format(repo_id))
if result.status_code == 204:
return "Repo successfully deleted."
else:
exit(1)
-#######
-# Roles
+ #######
+ # Roles
def role_list(self):
"""List all roles."""
- result = self.get('service/local/roles')[1]
+ result = self.get("service/local/roles")[1]
role_list = []
- for role in result['data']:
+ for role in result["data"]:
# wacky string concat is to provide the right format
# so that tabulate will iterate the string at the newline
# breaks and show multiline columns in a nice way
- roles_string = ''
- privs_string = ''
- if 'roles' in role:
- for roles in role['roles']:
- roles_string += roles + '\n'
-
- if 'privileges' in role:
- for privs in role['privileges']:
- privs_string += privs + '\n'
-
- role_list.append([
- role['id'],
- role['name'],
- roles_string,
- privs_string
- ])
+ roles_string = ""
+ privs_string = ""
+ if "roles" in role:
+ for roles in role["roles"]:
+ roles_string += roles + "\n"
+
+ if "privileges" in role:
+ for privs in role["privileges"]:
+ privs_string += privs + "\n"
+
+ role_list.append([role["id"], role["name"], roles_string, privs_string])
return role_list
:param privs_list: (optional) a list of existing privs to attach to this role
"""
data = {
- 'data': {
- 'id': role_id,
- 'name': role_name,
- 'description': role_description,
- 'sessionTimeout': 0,
- 'userManaged': True
+ "data": {
+ "id": role_id,
+ "name": role_name,
+ "description": role_description,
+ "sessionTimeout": 0,
+ "userManaged": True,
}
}
if roles_list:
- data['data']['roles'] = roles_list.split(',')
+ data["data"]["roles"] = roles_list.split(",")
if privs_list:
- data['data']['privileges'] = privs_list.split(',')
+ data["data"]["privileges"] = privs_list.split(",")
json_data = json.dumps(data)
- result = self.post('service/local/roles', data=json_data)
+ result = self.post("service/local/roles", data=json_data)
if result[0].status_code == 201:
return "Role successfully created."
:param role_id: The ID of the role to delete (from role list)
"""
- result = self.delete('service/local/roles/{}'.format(role_id))
+ result = self.delete("service/local/roles/{}".format(role_id))
if result.status_code == 204:
return "Role successfully deleted."
-#######
-# Users
+ #######
+ # Users
def user_list(self):
"""List all users."""
- result = self.get('service/local/plexus_users/allConfigured')[1]['data']
+ result = self.get("service/local/plexus_users/allConfigured")[1]["data"]
user_list = []
for user in result:
role_list = []
- for role in user['roles']:
- role_list.append([
- role['roleId']
- ])
-
- user_list.append([
- user['userId'],
- user['firstName'],
- user['lastName'],
- user['status'],
- role_list
- ])
+ for role in user["roles"]:
+ role_list.append([role["roleId"]])
+
+ user_list.append([user["userId"], user["firstName"], user["lastName"], user["status"], role_list])
return user_list
:param email: the user's email address
:param roles: a comma-separated list of roles to add the user to
"""
- role_list = roles.split(',')
+ role_list = roles.split(",")
data = {
- 'data': {
- 'userId': username,
- 'firstName': firstname,
- 'lastName': lastname,
- 'status': 'active',
- 'email': email,
- 'roles': role_list
+ "data": {
+ "userId": username,
+ "firstName": firstname,
+ "lastName": lastname,
+ "status": "active",
+ "email": email,
+ "roles": role_list,
}
}
json_data = json.dumps(data)
- result = self.post('service/local/users', data=json_data)
+ result = self.post("service/local/users", data=json_data)
if result[0].status_code == 201:
return "User successfully created."
:param username: The username to delete (from user list)
"""
- result = self.delete('service/local/users/{}'.format(username))
+ result = self.delete("service/local/users/{}".format(username))
if result.status_code == 204:
return "User successfully deleted."
"""Nexus3 REST API interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import json
import logging
:param repository: repo name
"""
- result = self.get("v1/assets?repository={}".format(repository))[1][
- "items"
- ]
+ result = self.get("v1/assets?repository={}".format(repository))[1]["items"]
if not result:
return "This repository has no assets"
else:
:param repository: the repo name
"""
- result = self.get("v1/components?repository={}".format(repository))[1][
- "items"
- ]
+ result = self.get("v1/components?repository={}".format(repository))[1]["items"]
if not result:
return "This repository has no components"
else:
list_of_privileges = []
for privilege in result:
list_of_privileges.append(
- [
- privilege["type"],
- privilege["name"],
- privilege["description"],
- privilege["readOnly"],
- ]
+ [privilege["type"], privilege["name"], privilege["description"], privilege["readOnly"],]
)
return list_of_privileges
while token is not None:
for tag in result["items"]:
list_of_tags.append(tag["name"])
- result = self.get(
- "v1/tags?continuationToken={}".format(
- result["continuationToken"]
- )
- )[1]
+ result = self.get("v1/tags?continuationToken={}".format(result["continuationToken"]))[1]
token = result["continuationToken"]
else:
for tag in result["items"]:
list_of_tasks = []
for task in result:
list_of_tasks.append(
- [
- task["name"],
- task["message"],
- task["currentState"],
- task["lastRunResult"],
- ]
+ [task["name"], task["message"], task["currentState"], task["lastRunResult"],]
)
return list_of_tasks
"""
data = {"tag": tag}
json_data = json.dumps(data)
- result = self.post(
- "v1/staging/move/{}".format(destination_repo), data=json_data
- )
+ result = self.post("v1/staging/move/{}".format(destination_repo), data=json_data)
return result
def read_script(self, name):
"repository": repository,
}
json_data = json.dumps(data)
- result = self.get(
- "v1/search/assets?q={}&repository={}".format(query, repository),
- data=json_data,
- )[1]["items"]
+ result = self.get("v1/search/assets?q={}&repository={}".format(query, repository), data=json_data,)[1]["items"]
list_of_assets = []
if details:
"""Read the Docs REST API interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import json
def __init__(self, **params):
"""Initialize the class."""
self.params = params
- if 'creds' not in self.params:
+ if "creds" not in self.params:
creds = {
- 'authtype': 'token',
- 'token': config.get_setting('rtd', 'token'),
- 'endpoint': config.get_setting('rtd', 'endpoint')
+ "authtype": "token",
+ "token": config.get_setting("rtd", "token"),
+ "endpoint": config.get_setting("rtd", "endpoint"),
}
- params['creds'] = creds
+ params["creds"] = creds
super(ReadTheDocs, self).__init__(**params)
:param kwargs:
:return: [projects]
"""
- result = self.get('projects/?limit=999')[1] # NOQA
- data = result['results']
+ result = self.get("projects/?limit=999")[1] # NOQA
+ data = result["results"]
project_list = []
for project in data:
- if 'slug' in project:
- project_list.append(project['slug'])
+ if "slug" in project:
+ project_list.append(project["slug"])
return project_list
def project_details(self, project):
:param kwargs:
:return: {result}
"""
- result = self.get('projects/{}/?expand=active_versions'.format(project))[1]
+ result = self.get("projects/{}/?expand=active_versions".format(project))[1]
return result
def project_version_list(self, project):
:param project: The project's slug
:return: {result}
"""
- result = self.get('projects/{}/versions/?active=True'
- .format(project))[1]
+ result = self.get("projects/{}/versions/?active=True".format(project))[1]
more_results = None
versions = []
# I feel like there must be a better way...but, this works. -DWTalton
- initial_versions = result['results']
+ initial_versions = result["results"]
for version in initial_versions:
- versions.append(version['slug'])
+ versions.append(version["slug"])
- if result['next']:
- more_results = result['next'].rsplit('/', 1)[-1]
+ if result["next"]:
+ more_results = result["next"].rsplit("/", 1)[-1]
if more_results:
while more_results is not None:
- get_more_results = self.get('projects/{}/versions/'
- .format(project) + more_results)[1]
- more_results = get_more_results['next']
+ get_more_results = self.get("projects/{}/versions/".format(project) + more_results)[1]
+ more_results = get_more_results["next"]
- for version in get_more_results['results']:
- versions.append(version['slug'])
+ for version in get_more_results["results"]:
+ versions.append(version["slug"])
if more_results is not None:
- more_results = more_results.rsplit('/', 1)[-1]
+ more_results = more_results.rsplit("/", 1)[-1]
return versions
:param version: The version's slug
:return: {result}
"""
- result = self.get('projects/{}/versions/{}/'
- .format(project, version))[1]
+ result = self.get("projects/{}/versions/{}/".format(project, version))[1]
return json.dumps(result, indent=2)
def project_version_update(self, project, version, active):
:param active: 'true' or 'false'
:return: {result}
"""
- data = {
- "active": active
- }
+ data = {"active": active}
json_data = json.dumps(data)
- result = self.patch('projects/{}/versions/{}/'.format(project, version),
- data=json_data)
+ result = self.patch("projects/{}/versions/{}/".format(project, version), data=json_data)
return result
def project_update(self, project, *args):
"""
data = args[0]
json_data = json.dumps(data)
- result = self.patch('projects/{}/'.format(project), data=json_data)
+ result = self.patch("projects/{}/".format(project), data=json_data)
if result.status_code == 204:
return True, result.status_code
else:
return False, result.status_code
- def project_create(self, name, repository_url, repository_type, homepage,
- programming_language, language, **kwargs):
+ def project_create(self, name, repository_url, repository_type, homepage, programming_language, language, **kwargs):
"""Create a new Read the Docs project.
:param name: Project name. Any spaces will convert to dashes for the
:return: {results}
"""
data = {
- 'name': name,
- 'repository': {
- 'url': repository_url,
- 'type': repository_type
- },
- 'homepage': homepage,
- 'programming_language': programming_language,
- 'language': language
+ "name": name,
+ "repository": {"url": repository_url, "type": repository_type},
+ "homepage": homepage,
+ "programming_language": programming_language,
+ "language": language,
}
json_data = json.dumps(data)
- result = self.post('projects/', data=json_data, **kwargs)
+ result = self.post("projects/", data=json_data, **kwargs)
return result
def project_build_list(self, project, **kwargs):
:param kwargs:
:return: {result}
"""
- result = self.get('projects/{}/builds/?running=True'
- .format(project), **kwargs)[1]
+ result = self.get("projects/{}/builds/?running=True".format(project), **kwargs)[1]
- if result['count'] > 0:
+ if result["count"] > 0:
return json.dumps(result, indent=2)
else:
return "There are no active builds."
:param kwargs:
:return: {result}
"""
- result = self.get('projects/{}/builds/{}/'
- .format(project, build_id))[1]
+ result = self.get("projects/{}/builds/{}/".format(project, build_id))[1]
return json.dumps(result, indent=2)
def project_build_trigger(self, project, version):
(must be an active version)
:return: {result}
"""
- result = self.post('projects/{}/versions/{}/builds/'
- .format(project, version))[1]
+ result = self.post("projects/{}/versions/{}/builds/".format(project, version))[1]
return json.dumps(result, indent=2)
def subproject_list(self, project):
:param kwargs:
:return: [subprojects]
"""
- result = self.get('projects/{}/subprojects/?limit=999'.format(project))[1] # NOQA
- data = result['results']
+ result = self.get("projects/{}/subprojects/?limit=999".format(project))[1] # NOQA
+ data = result["results"]
subproject_list = []
for subproject in data:
- subproject_list.append(subproject['child']['slug'])
+ subproject_list.append(subproject["child"]["slug"])
return subproject_list
:param subproject:
:return:
"""
- result = self.get('projects/{}/subprojects/{}/'
- .format(project, subproject))[1]
+ result = self.get("projects/{}/subprojects/{}/".format(project, subproject))[1]
return result
def subproject_create(self, project, subproject, alias=None):
:param alias: An alias (not required). (user-defined slug)
:return:
"""
- data = {
- 'child': subproject,
- 'alias': alias
- }
+ data = {"child": subproject, "alias": alias}
json_data = json.dumps(data)
- result = self.post('projects/{}/subprojects/'
- .format(project), data=json_data)
+ result = self.post("projects/{}/subprojects/".format(project), data=json_data)
return result
def subproject_delete(self, project, subproject):
:param subproject:
:return:
"""
- result = self.delete('projects/{}/subprojects/{}/'
- .format(project, subproject))
+ result = self.delete("projects/{}/subprojects/{}/".format(project, subproject))
- if hasattr(result, 'status_code'):
+ if hasattr(result, "status_code"):
if result.status_code == 204:
return True
else:
@click.group()
-@click.option('--debug', envvar='DEBUG', is_flag=True, default=False)
-@click.option('--password', envvar='LFTOOLS_PASSWORD', default=None)
-@click.option('--username', envvar='LFTOOLS_USERNAME', default=None)
-@click.option('-i', '--interactive', is_flag=True, default=False)
+@click.option("--debug", envvar="DEBUG", is_flag=True, default=False)
+@click.option("--password", envvar="LFTOOLS_PASSWORD", default=None)
+@click.option("--username", envvar="LFTOOLS_USERNAME", default=None)
+@click.option("-i", "--interactive", is_flag=True, default=False)
@click.pass_context
@click.version_option()
def cli(ctx, debug, interactive, password, username):
if debug:
logging.getLogger("").setLevel(logging.DEBUG)
- ctx.obj['DEBUG'] = debug
- log.debug('DEBUG mode enabled.')
+ ctx.obj["DEBUG"] = debug
+ log.debug("DEBUG mode enabled.")
# Start > Credentials
if username is None:
if interactive:
- username = input('Username: ')
+ username = input("Username: ")
else:
try:
- username = conf.get_setting('global', 'username')
- except (configparser.NoOptionError,
- configparser.NoSectionError) as e:
+ username = conf.get_setting("global", "username")
+ except (configparser.NoOptionError, configparser.NoSectionError) as e:
username = None
if password is None:
if interactive:
- password = getpass.getpass('Password: ')
+ password = getpass.getpass("Password: ")
else:
try:
- password = conf.get_setting('global', 'password')
- except (configparser.NoOptionError,
- configparser.NoSectionError) as e:
+ password = conf.get_setting("global", "password")
+ except (configparser.NoOptionError, configparser.NoSectionError) as e:
password = None
- ctx.obj['username'] = username
- ctx.obj['password'] = password
+ ctx.obj["username"] = username
+ ctx.obj["password"] = password
# End > Credentials
cli.add_command(config_sys)
cli.add_command(deploy)
cli.add_command(dco)
-cli.add_command(gerrit_cli, name='gerrit')
-cli.add_command(github_cli, name='github')
+cli.add_command(gerrit_cli, name="gerrit")
+cli.add_command(github_cli, name="github")
cli.add_command(infofile)
-cli.add_command(jenkins_cli, name='jenkins')
+cli.add_command(jenkins_cli, name="jenkins")
cli.add_command(license)
cli.add_command(nexus)
cli.add_command(nexus_two)
try:
from lftools.cli.ldap_cli import ldap_cli
- cli.add_command(ldap_cli, name='ldap')
+
+ cli.add_command(ldap_cli, name="ldap")
except ImportError:
from lftools.cli.no_cmd import no_ldap as ldap_cli
- cli.add_command(ldap_cli, name='ldap')
+
+ cli.add_command(ldap_cli, name="ldap")
try:
from lftools.openstack.cmd import openstack
+
cli.add_command(openstack)
except ImportError:
from lftools.openstack.no_cmd import openstack
+
cli.add_command(openstack)
cli(obj={})
-if __name__ == '__main__':
+if __name__ == "__main__":
main()
##############################################################################
"""CLI interface for config subsystem."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
import logging
import sys
log = logging.getLogger(__name__)
-@click.group(name='config')
+@click.group(name="config")
@click.pass_context
def config_sys(ctx):
"""Configuration subsystem."""
pass
-@click.command(name='get')
-@click.argument('section', type=str)
-@click.argument('option', type=str, required=False)
+@click.command(name="get")
+@click.argument("section", type=str)
+@click.argument("option", type=str, required=False)
@click.pass_context
def get_setting(ctx, section, option):
"""Print section or setting from config file."""
try:
result = config.get_setting(section, option)
- except (configparser.NoOptionError,
- configparser.NoSectionError) as e:
+ except (configparser.NoOptionError, configparser.NoSectionError) as e:
log.error(e)
sys.exit(1)
if isinstance(result, list):
for i in result:
- log.info('{}: {}'.format(i, config.get_setting(section, i)))
+ log.info("{}: {}".format(i, config.get_setting(section, i)))
else:
log.info(result)
-@click.command(name='set')
-@click.argument('section')
-@click.argument('option')
-@click.argument('value')
+@click.command(name="set")
+@click.argument("section")
+@click.argument("option")
+@click.argument("value")
@click.pass_context
def set_setting(ctx, section, option, value):
"""Set a setting in the config file."""
- log.debug('Set config\n[{}]\n{}:{}'.format(section, option, value))
+ log.debug("Set config\n[{}]\n{}:{}".format(section, option, value))
config.set_setting(section, option, value)
##############################################################################
"""Script to check a git repository for commits missing DCO."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import sys
@click.command()
-@click.argument('repo-path', required=False)
+@click.argument("repo-path", required=False)
@click.pass_context
def check(ctx, repo_path):
"""Check repository for commits missing DCO.
@click.command()
-@click.argument('repo-path', required=False)
+@click.argument("repo-path", required=False)
@click.pass_context
def match(ctx, repo_path):
"""Check for commits whose DCO does not match the commit author's email.
##############################################################################
"""Script to deploy files to a Nexus sites repository."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
import logging
@click.command()
-@click.argument('nexus-url', envvar='NEXUS_URL')
-@click.argument('nexus-path', envvar='NEXUS_PATH')
-@click.argument('workspace', envvar='WORKSPACE')
-@click.option('-p', '--pattern', multiple=True)
+@click.argument("nexus-url", envvar="NEXUS_URL")
+@click.argument("nexus-path", envvar="NEXUS_PATH")
+@click.argument("workspace", envvar="WORKSPACE")
+@click.option("-p", "--pattern", multiple=True)
@click.pass_context
def archives(ctx, nexus_url, nexus_path, workspace, pattern):
"""Archive files to a Nexus site repository.
except OSError as e:
deploy_sys._log_error_and_exit(str(e))
- log.info('Archives upload complete.')
+ log.info("Archives upload complete.")
-@click.command(name='copy-archives')
-@click.argument('workspace', envvar='WORKSPACE')
-@click.argument('pattern', nargs=-1, default=None, required=False)
+@click.command(name="copy-archives")
+@click.argument("workspace", envvar="WORKSPACE")
+@click.argument("pattern", nargs=-1, default=None, required=False)
@click.pass_context
def copy_archives(ctx, workspace, pattern):
"""Copy files for archiving.
@click.command()
-@click.argument('nexus-url', envvar='NEXUS_URL')
-@click.argument('nexus-repo-id')
-@click.argument('group-id')
-@click.argument('artifact-id')
-@click.argument('version')
-@click.argument('packaging')
-@click.argument('file')
-@click.option('-c', '--classifier', default='')
+@click.argument("nexus-url", envvar="NEXUS_URL")
+@click.argument("nexus-repo-id")
+@click.argument("group-id")
+@click.argument("artifact-id")
+@click.argument("version")
+@click.argument("packaging")
+@click.argument("file")
+@click.option("-c", "--classifier", default="")
@click.pass_context
-def file(ctx,
- nexus_url,
- nexus_repo_id,
- group_id,
- artifact_id,
- version,
- packaging,
- classifier,
- file):
+def file(ctx, nexus_url, nexus_repo_id, group_id, artifact_id, version, packaging, classifier, file):
"""Upload file to Nexus as a Maven artifact using cURL.
This function will upload an artifact to Nexus while providing all of
"""
try:
deploy_sys.upload_maven_file_to_nexus(
- nexus_url, nexus_repo_id,
- group_id, artifact_id, version,
- packaging, file, classifier)
+ nexus_url, nexus_repo_id, group_id, artifact_id, version, packaging, file, classifier
+ )
except HTTPError as e:
log.error(str(e))
sys.exit(1)
- log.info('Upload maven file to nexus completed.')
+ log.info("Upload maven file to nexus completed.")
@click.command()
-@click.argument('nexus-url', envvar='NEXUS_URL')
-@click.argument('nexus-path', envvar='NEXUS_PATH')
-@click.argument('build-url', envvar='BUILD_URL')
+@click.argument("nexus-url", envvar="NEXUS_URL")
+@click.argument("nexus-path", envvar="NEXUS_PATH")
+@click.argument("build-url", envvar="BUILD_URL")
@click.pass_context
def logs(ctx, nexus_url, nexus_path, build_url):
"""Deploy logs to a Nexus site repository.
log.error(str(e))
sys.exit(1)
- log.info('Logs upload complete.')
+ log.info("Logs upload complete.")
-@click.command(name='maven-file')
-@click.argument('nexus-url', envvar='NEXUS_URL')
-@click.argument('repo-id', envvar='REPO_ID')
-@click.argument('file-name', envvar='FILE_NAME')
+@click.command(name="maven-file")
+@click.argument("nexus-url", envvar="NEXUS_URL")
+@click.argument("repo-id", envvar="REPO_ID")
+@click.argument("file-name", envvar="FILE_NAME")
# Maven Config
-@click.option('-b', '--maven-bin', envvar='MAVEN_BIN',
- help='Path of maven binary.')
-@click.option('-gs', '--global-settings', envvar='GLOBAL_SETTINGS_FILE',
- help='Global settings file.')
-@click.option('-s', '--settings', envvar='SETTINGS_FILE',
- help='Settings file.')
-@click.option('-p', '--maven-params',
- help='Pass Maven commandline options to the mvn command.')
+@click.option("-b", "--maven-bin", envvar="MAVEN_BIN", help="Path of maven binary.")
+@click.option("-gs", "--global-settings", envvar="GLOBAL_SETTINGS_FILE", help="Global settings file.")
+@click.option("-s", "--settings", envvar="SETTINGS_FILE", help="Settings file.")
+@click.option("-p", "--maven-params", help="Pass Maven commandline options to the mvn command.")
# Maven Artifact GAV
-@click.option('-a', '--artifact-id',
- help='Maven Artifact ID.')
-@click.option('-c', '--classifier',
- help='File classifier.')
-@click.option('-f', '--pom-file',
- help='Pom file to extract GAV information from.')
-@click.option('-g', '--group-id',
- help='Maven Group ID')
-@click.option('-v', '--version',
- help='Maven artifact version.')
+@click.option("-a", "--artifact-id", help="Maven Artifact ID.")
+@click.option("-c", "--classifier", help="File classifier.")
+@click.option("-f", "--pom-file", help="Pom file to extract GAV information from.")
+@click.option("-g", "--group-id", help="Maven Group ID")
+@click.option("-v", "--version", help="Maven artifact version.")
@click.pass_context
def maven_file(
# Maven Config
- ctx, nexus_url, repo_id, file_name,
- maven_bin, global_settings, settings,
+ ctx,
+ nexus_url,
+ repo_id,
+ file_name,
+ maven_bin,
+ global_settings,
+ settings,
maven_params,
# Maven GAV
- artifact_id, group_id, classifier, version,
- pom_file):
+ artifact_id,
+ group_id,
+ classifier,
+ version,
+ pom_file,
+):
"""Deploy a file to a Nexus maven2 repository.
As this script uses mvn to deploy, the server configuration should be
If pom-file is passed in via the "-f" option then the Maven GAV parameters
are not necessary. The pom-file setting overrides the Maven GAV parameters.
"""
- params = ['deploy', 'maven-file']
+ params = ["deploy", "maven-file"]
# Maven Configuration
if maven_bin:
@click.command()
-@click.argument('nexus-repo-url', envvar='NEXUS_REPO_URL')
-@click.argument('deploy-dir', envvar='DEPLOY_DIR')
-@click.option('-s', '--snapshot', is_flag=True, default=False,
- help='Deploy a snapshot repo.')
+@click.argument("nexus-repo-url", envvar="NEXUS_REPO_URL")
+@click.argument("deploy-dir", envvar="DEPLOY_DIR")
+@click.option("-s", "--snapshot", is_flag=True, default=False, help="Deploy a snapshot repo.")
@click.pass_context
def nexus(ctx, nexus_repo_url, deploy_dir, snapshot):
"""Deploy a Maven repository to a specified Nexus repository.
deploy_sys._log_error_and_exit(str(e))
-@click.command(name='nexus-stage')
-@click.argument('nexus-url', envvar='NEXUS_URL')
-@click.argument('staging-profile-id', envvar='STAGING_PROFILE_ID')
-@click.argument('deploy-dir', envvar='DEPLOY_DIR')
+@click.command(name="nexus-stage")
+@click.argument("nexus-url", envvar="NEXUS_URL")
+@click.argument("staging-profile-id", envvar="STAGING_PROFILE_ID")
+@click.argument("deploy-dir", envvar="DEPLOY_DIR")
@click.pass_context
def nexus_stage(ctx, nexus_url, staging_profile_id, deploy_dir):
"""Deploy a Maven repository to a Nexus staging repository.
This script takes a local Maven repository and deploys it to a Nexus
staging repository as defined by the staging-profile-id.
"""
- deploy_sys.deploy_nexus_stage(nexus_url,
- staging_profile_id,
- deploy_dir)
+ deploy_sys.deploy_nexus_stage(nexus_url, staging_profile_id, deploy_dir)
-@click.command(name='nexus-stage-repo-close')
-@click.argument('nexus-url', envvar='NEXUS_URL')
-@click.argument('staging-profile-id', envvar='STAGING_PROFILE_ID')
-@click.argument('staging-repo-id')
+@click.command(name="nexus-stage-repo-close")
+@click.argument("nexus-url", envvar="NEXUS_URL")
+@click.argument("staging-profile-id", envvar="STAGING_PROFILE_ID")
+@click.argument("staging-repo-id")
@click.pass_context
def nexus_stage_repo_close(ctx, nexus_url, staging_profile_id, staging_repo_id):
"""Close a Nexus staging repo."""
- deploy_sys.nexus_stage_repo_close(nexus_url,
- staging_profile_id,
- staging_repo_id)
+ deploy_sys.nexus_stage_repo_close(nexus_url, staging_profile_id, staging_repo_id)
-@click.command(name='nexus-stage-repo-create')
-@click.argument('nexus-url', envvar='NEXUS_URL')
-@click.argument('staging-profile-id', envvar='STAGING_PROFILE_ID')
+@click.command(name="nexus-stage-repo-create")
+@click.argument("nexus-url", envvar="NEXUS_URL")
+@click.argument("staging-profile-id", envvar="STAGING_PROFILE_ID")
@click.pass_context
def nexus_stage_repo_create(ctx, nexus_url, staging_profile_id):
"""Create a Nexus staging repo."""
- staging_repo_id = deploy_sys.nexus_stage_repo_create(nexus_url,
- staging_profile_id)
+ staging_repo_id = deploy_sys.nexus_stage_repo_create(nexus_url, staging_profile_id)
log.info(staging_repo_id)
-@click.command(name='nexus-zip')
-@click.argument('nexus-url', envvar='NEXUS_URL')
-@click.argument('nexus-repo', envvar='NEXUS_REPO')
-@click.argument('nexus-path', envvar='NEXUS_PATH')
-@click.argument('deploy-zip', envvar='DEPLOY_DIR')
+@click.command(name="nexus-zip")
+@click.argument("nexus-url", envvar="NEXUS_URL")
+@click.argument("nexus-repo", envvar="NEXUS_REPO")
+@click.argument("nexus-path", envvar="NEXUS_PATH")
+@click.argument("deploy-zip", envvar="DEPLOY_DIR")
@click.pass_context
def nexus_zip(ctx, nexus_url, nexus_repo, nexus_path, deploy_zip):
"""Deploy zip file containing artifacts to Nexus using cURL.
log.error(str(e))
sys.exit(1)
- log.info('Zip file upload complete.')
+ log.info("Zip file upload complete.")
deploy.add_command(archives)
pass
-@click.command(name='addfile')
-@click.argument('gerrit_fqdn')
-@click.argument('gerrit_project')
-@click.argument('filename')
-@click.option('--issue_id', type=str, required=False,
- help='For projects that enforce an issue id for changesets')
-@click.option('--file_location', type=str, required=False,
- help='option allos you to specify full path and file name')
+@click.command(name="addfile")
+@click.argument("gerrit_fqdn")
+@click.argument("gerrit_project")
+@click.argument("filename")
+@click.option("--issue_id", type=str, required=False, help="For projects that enforce an issue id for changesets")
+@click.option("--file_location", type=str, required=False, help="option allos you to specify full path and file name")
@click.pass_context
def addfile(ctx, gerrit_fqdn, gerrit_project, filename, issue_id, file_location):
"""Add an file for review to a Project.
log.info(pformat(data))
-@click.command(name='addinfojob')
-@click.argument('gerrit_fqdn')
-@click.argument('gerrit_project')
-@click.argument('jjbrepo')
-@click.option('--reviewid', type=str, required=False,
- help='ammend a review rather than making a new one')
-@click.option('--issue_id', type=str, required=False,
- help='For projects that enforce an issue id for changesets')
+@click.command(name="addinfojob")
+@click.argument("gerrit_fqdn")
+@click.argument("gerrit_project")
+@click.argument("jjbrepo")
+@click.option("--reviewid", type=str, required=False, help="ammend a review rather than making a new one")
+@click.option("--issue_id", type=str, required=False, help="For projects that enforce an issue id for changesets")
@click.pass_context
def addinfojob(ctx, gerrit_fqdn, gerrit_project, jjbrepo, reviewid, issue_id):
"""Add an INFO job for a new Project.
log.info(pformat(data))
-@click.command(name='addgitreview')
-@click.argument('gerrit_fqdn')
-@click.argument('gerrit_project')
-@click.option('--issue_id', type=str, required=False,
- help='For projects that enforce an issue id for changesets')
+@click.command(name="addgitreview")
+@click.argument("gerrit_fqdn")
+@click.argument("gerrit_project")
+@click.option("--issue_id", type=str, required=False, help="For projects that enforce an issue id for changesets")
@click.pass_context
def addgitreview(ctx, gerrit_fqdn, gerrit_project, issue_id):
"""Add git review to a project.
log.info(pformat(data))
-@click.command(name='addgithubrights')
-@click.argument('gerrit_fqdn')
-@click.argument('gerrit_project')
+@click.command(name="addgithubrights")
+@click.argument("gerrit_fqdn")
+@click.argument("gerrit_project")
@click.pass_context
def addgithubrights(ctx, gerrit_fqdn, gerrit_project):
"""Grant Github read for a project.
log.info(pformat(data))
-@click.command(name='abandonchanges')
-@click.argument('gerrit_fqdn')
-@click.argument('gerrit_project')
+@click.command(name="abandonchanges")
+@click.argument("gerrit_fqdn")
+@click.argument("gerrit_project")
@click.pass_context
def abandonchanges(ctx, gerrit_fqdn, gerrit_project):
"""Abandon all OPEN changes for a gerrit project.
data = g.abandon_changes(gerrit_fqdn, gerrit_project)
log.info(pformat(data))
+
# Creates a gerrit project if project does not exist and adds ldap group as owner.
# Limits: does not support inherited permissions from projects other than All-Projects.
-@click.command(name='createproject')
-@click.argument('gerrit_fqdn')
-@click.argument('gerrit_project')
-@click.argument('ldap_group')
-@click.option('--description', type=str, required=True,
- help='Project Description')
-@click.option('--check', is_flag=True,
- help='just check if the project exists')
+@click.command(name="createproject")
+@click.argument("gerrit_fqdn")
+@click.argument("gerrit_project")
+@click.argument("ldap_group")
+@click.option("--description", type=str, required=True, help="Project Description")
+@click.option("--check", is_flag=True, help="just check if the project exists")
@click.pass_context
def createproject(ctx, gerrit_fqdn, gerrit_project, ldap_group, description, check):
"""Create a project via the gerrit API.
log.info(pformat(data))
-@click.command(name='list-project-permissions')
-@click.argument('gerrit_fqdn')
-@click.argument('project')
+@click.command(name="list-project-permissions")
+@click.argument("gerrit_fqdn")
+@click.argument("project")
@click.pass_context
def list_project_permissions(ctx, gerrit_fqdn, project):
"""List Owners of a Project."""
log.info(pformat(ldap_group))
-@click.command(name='list-project-inherits-from')
-@click.argument('gerrit_fqdn')
-@click.argument('gerrit_project')
+@click.command(name="list-project-inherits-from")
+@click.argument("gerrit_fqdn")
+@click.argument("gerrit_project")
@click.pass_context
def list_project_inherits_from(ctx, gerrit_fqdn, gerrit_project):
"""List who a project inherits from."""
pass
-@click.command(name='submit-pr')
-@click.argument('organization')
-@click.argument('repo')
-@click.argument('pr', type=int)
+@click.command(name="submit-pr")
+@click.argument("organization")
+@click.argument("repo")
+@click.argument("pr", type=int)
@click.pass_context
def submit_pr(ctx, organization, repo, pr):
"""Submit a pr if mergeable."""
sys.exit(1)
-@click.command(name='votes')
-@click.argument('organization')
-@click.argument('repo')
-@click.argument('pr', type=int)
+@click.command(name="votes")
+@click.argument("organization")
+@click.argument("repo")
+@click.argument("pr", type=int)
@click.pass_context
def votes(ctx, organization, repo, pr):
"""Helper for votes."""
print("Approvals:", approval_list)
-@click.command(name='list')
-@click.argument('organization')
-@click.option('--audit', is_flag=True, required=False,
- help='List members without 2fa')
-@click.option('--repos', is_flag=True, required=False,
- help='List all repos')
-@click.option('--full', is_flag=True, required=False,
- help='All members and their respective teams')
-@click.option('--teams', is_flag=True, required=False,
- help='List avaliable teams')
-@click.option('--team', type=str, required=False,
- help='List members of a team')
-@click.option('--repofeatures', is_flag=True, required=False,
- help='List enabled features for repos in an org')
+@click.command(name="list")
+@click.argument("organization")
+@click.option("--audit", is_flag=True, required=False, help="List members without 2fa")
+@click.option("--repos", is_flag=True, required=False, help="List all repos")
+@click.option("--full", is_flag=True, required=False, help="All members and their respective teams")
+@click.option("--teams", is_flag=True, required=False, help="List avaliable teams")
+@click.option("--team", type=str, required=False, help="List members of a team")
+@click.option("--repofeatures", is_flag=True, required=False, help="List enabled features for repos in an org")
@click.pass_context
def list(ctx, organization, repos, audit, full, teams, team, repofeatures):
"""List options for github org repos."""
helper_list(ctx, organization, repos, audit, full, teams, team, repofeatures)
-@click.command(name='create-repo')
-@click.argument('organization')
-@click.argument('repository')
-@click.argument('description')
-@click.option('--has_issues', is_flag=True, required=False,
- help='Repo should have issues')
-@click.option('--has_projects', is_flag=True, required=False,
- help='Repo should have projects')
-@click.option('--has_wiki', is_flag=True, required=False,
- help='Repo should have wiki')
+@click.command(name="create-repo")
+@click.argument("organization")
+@click.argument("repository")
+@click.argument("description")
+@click.option("--has_issues", is_flag=True, required=False, help="Repo should have issues")
+@click.option("--has_projects", is_flag=True, required=False, help="Repo should have projects")
+@click.option("--has_wiki", is_flag=True, required=False, help="Repo should have wiki")
@click.pass_context
def createrepo(ctx, organization, repository, description, has_issues, has_projects, has_wiki):
"""Create a Github repo within an Organization.
print(ghe)
-@click.command(name='update-repo')
-@click.argument('organization')
-@click.argument('repository')
-@click.option('--has_issues', is_flag=True, required=False,
- help='Repo should have issues')
-@click.option('--has_projects', is_flag=True, required=False,
- help='Repo should have projects')
-@click.option('--has_wiki', is_flag=True, required=False,
- help='Repo should have wiki')
-@click.option('--add_team', type=str, required=False,
- help='Add team to repo')
-@click.option('--remove_team', type=str, required=False,
- help='remove team from repo')
+@click.command(name="update-repo")
+@click.argument("organization")
+@click.argument("repository")
+@click.option("--has_issues", is_flag=True, required=False, help="Repo should have issues")
+@click.option("--has_projects", is_flag=True, required=False, help="Repo should have projects")
+@click.option("--has_wiki", is_flag=True, required=False, help="Repo should have wiki")
+@click.option("--add_team", type=str, required=False, help="Add team to repo")
+@click.option("--remove_team", type=str, required=False, help="remove team from repo")
@click.pass_context
def updaterepo(ctx, organization, repository, has_issues, has_projects, has_wiki, add_team, remove_team):
"""Update a Github repo within an Organization.
for repo in repos:
if repo.name == repository:
- repo_actual = (repo)
+ repo_actual = repo
try:
repo_actual
team.remove_from_repos(repo_actual)
-@click.command(name='create-team')
-@click.argument('organization')
-@click.argument('name')
-@click.argument('privacy')
-@click.option('--repo', type=str, required=False,
- help='Assign team to repo')
+@click.command(name="create-team")
+@click.argument("organization")
+@click.argument("name")
+@click.argument("privacy")
+@click.option("--repo", type=str, required=False, help="Assign team to repo")
@click.pass_context
def createteam(ctx, organization, name, repo, privacy):
"""Create a Github team within an Organization.
if repo:
try:
- org.create_team(
- name=name,
- repo_names=repos,
- privacy=privacy
- )
+ org.create_team(name=name, repo_names=repos, privacy=privacy)
except GithubException as ghe:
print(ghe)
if not repo:
try:
- org.create_team(
- name=name,
- privacy=privacy
- )
+ org.create_team(name=name, privacy=privacy)
except GithubException as ghe:
print(ghe)
-@click.command(name='user')
-@click.argument('organization')
-@click.argument('user')
-@click.argument('team')
-@click.option('--delete', is_flag=True, required=False,
- help='Remove user from org')
-@click.option('--admin', is_flag=True, required=False,
- help='User is admin for org, or a maintaner of a team')
+@click.command(name="user")
+@click.argument("organization")
+@click.argument("user")
+@click.argument("team")
+@click.option("--delete", is_flag=True, required=False, help="Remove user from org")
+@click.option("--admin", is_flag=True, required=False, help="User is admin for org, or a maintaner of a team")
@click.pass_context
def user(ctx, organization, user, team, delete, admin):
"""Add and Remove users from an org team."""
pass
-@click.command(name='create-info-file')
-@click.argument('gerrit_url', required=True)
-@click.argument('gerrit_project', required=True)
-@click.option('--directory', type=str, required=False, default="r",
- help='custom gerrit directory, eg not /r/')
-@click.option('--empty', is_flag=True, required=False,
- help='Create info file for uncreated project.')
-@click.option('--tsc_approval', type=str, required=False, default="missing",
- help='optionally provde a tsc approval link')
+@click.command(name="create-info-file")
+@click.argument("gerrit_url", required=True)
+@click.argument("gerrit_project", required=True)
+@click.option("--directory", type=str, required=False, default="r", help="custom gerrit directory, eg not /r/")
+@click.option("--empty", is_flag=True, required=False, help="Create info file for uncreated project.")
+@click.option(
+ "--tsc_approval", type=str, required=False, default="missing", help="optionally provde a tsc approval link"
+)
@click.pass_context
def create_info_file(ctx, gerrit_url, gerrit_project, directory, empty, tsc_approval):
"""Create an initial INFO file.
gerrit_url example: gerrit.umbrella.com
directory example: /gerrit/ (rather than most projects /r/)
"""
- url = ("https://{}/{}".format(gerrit_url, directory))
+ url = "https://{}/{}".format(gerrit_url, directory)
projectid_encoded = gerrit_project.replace("/", "%2F")
# project name with only underscores for info file anchors.
# project name with only dashes for ldap groups.
pass1 = config.get_setting("gerrit", "password")
auth = HTTPBasicAuth(user, pass1)
rest = GerritRestAPI(url=url, auth=auth)
- access_str = 'projects/{}/access'.format(projectid_encoded)
- headers = {'Content-Type': 'application/json; charset=UTF-8'}
+ access_str = "projects/{}/access".format(projectid_encoded)
+ headers = {"Content-Type": "application/json; charset=UTF-8"}
result = rest.get(access_str, headers=headers)
- if 'inherits_from' in result:
- inherits = (result['inherits_from']['id'])
+ if "inherits_from" in result:
+ inherits = result["inherits_from"]["id"]
if inherits != "All-Projects":
print(" Inherits from:", inherits)
print("Better Check this unconventional inherit")
try:
- owner = (result['local']['refs/*']['permissions']['owner']['rules'])
+ owner = result["local"]["refs/*"]["permissions"]["owner"]["rules"]
except:
print("ERROR: Check project config, no owner set!")
for x in owner:
match = re.search(r"[^=]+(?=,)", x)
- ldap_group = (match.group(0))
+ ldap_group = match.group(0)
- if umbrella == 'o-ran-sc':
+ if umbrella == "o-ran-sc":
umbrella = "oran"
- date = (datetime.datetime.now().strftime("%Y-%m-%d"))
+ date = datetime.datetime.now().strftime("%Y-%m-%d")
ldap_group = "{}-gerrit-{}-committers".format(umbrella, project_dashed)
server: 'freenode.net'
channel: '#{1}'
repeats: ''
- time: ''""".format(project_underscored, umbrella, umbrella_tld, date)
+ time: ''""".format(
+ project_underscored, umbrella, umbrella_tld, date
+ )
tsc_string = """
tsc:
- type: ''
name: ''
link: ''
-""".format(tsc_approval, end='')
+""".format(
+ tsc_approval, end=""
+ )
empty_committer = """ - name: ''
email: ''
company: ''
print("repositories:")
print(" - {}".format(gerrit_project))
print("committers:")
- print(" - <<: *{1}_{0}_ptl".format(project_underscored, umbrella, end=''))
+ print(" - <<: *{1}_{0}_ptl".format(project_underscored, umbrella, end=""))
if not empty:
this = helper_yaml4info(ldap_group)
- print(this, end='')
+ print(this, end="")
else:
- print(empty_committer, end='')
+ print(empty_committer, end="")
print(tsc_string)
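The replace("/", "%2F") above hand-encodes the Gerrit project ID for the REST API; a minimal stdlib equivalent (illustrative, not part of the patch) using urllib.parse.quote:
from urllib.parse import quote
# safe="" forces "/" to be percent-encoded as well
projectid_encoded = quote("releng/builder", safe="")
print(projectid_encoded)  # releng%2Fbuilder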
-@click.command(name='get-committers')
-@click.argument('file', envvar='FILE_NAME', required=True)
-@click.option('--full', type=bool, required=False,
- help='Output name email and id for all committers in an infofile')
-@click.option('--id', type=str, required=False,
- help='Full output for a specific LFID')
+@click.command(name="get-committers")
+@click.argument("file", envvar="FILE_NAME", required=True)
+@click.option("--full", type=bool, required=False, help="Output name email and id for all committers in an infofile")
+@click.option("--id", type=str, required=False, help="Full output for a specific LFID")
@click.pass_context
def get_committers(ctx, file, full, id):
"""Extract Committer info from INFO.yaml or LDAP dump."""
- with open(file, 'r') as yaml_file:
+ with open(file, "r") as yaml_file:
project = yaml.safe_load(yaml_file)
def print_committer_info(committer, full):
"""Print committers."""
if full:
- print(" - name: {}".format(committer['name']))
- print(" email: {}".format(committer['email']))
- print(" id: {}".format(committer['id']))
+ print(" - name: {}".format(committer["name"]))
+ print(" email: {}".format(committer["email"]))
+ print(" id: {}".format(committer["id"]))
def list_committers(full, id, project):
"""List commiters from the INFO.yaml file."""
- lookup = project.get('committers', [])
+ lookup = project.get("committers", [])
for item in lookup:
if id:
- if item['id'] == id:
+ if item["id"] == id:
print_committer_info(item, full)
break
else:
continue
print_committer_info(item, full)
+
list_committers(full, id, project)
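get-committers walks the committers block of an INFO.yaml; a self-contained sketch of the same lookup against hypothetical file contents:
import yaml
doc = yaml.safe_load(
    """
committers:
    - name: 'Jane Doe'
      email: 'jane@example.org'
      id: 'jdoe'
"""
)
for item in doc.get("committers", []):
    if item["id"] == "jdoe":
        print(item["name"], item["email"])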
-@click.command(name='sync-committers')
-@click.argument('info_file')
-@click.argument('ldap_file')
-@click.argument('id')
-@click.option('--repo', type=str, required=False,
- help='repo name')
+@click.command(name="sync-committers")
+@click.argument("info_file")
+@click.argument("ldap_file")
+@click.argument("id")
+@click.option("--repo", type=str, required=False, help="repo name")
@click.pass_context
def sync_committers(ctx, id, info_file, ldap_file, repo):
"""Sync committer information from LDAP into INFO.yaml."""
ryaml.preserve_quotes = True
ryaml.indent(mapping=4, sequence=6, offset=4)
ryaml.explicit_start = True
- with open(info_file, 'r') as stream:
+ with open(info_file, "r") as stream:
try:
yaml.safe_load(stream)
except yaml.YAMLError as exc:
ldap_data = ryaml.load(f)
def readfile(data, ldap_data, id):
- committer_info = info_data['committers']
- repo_info = info_data['repositories']
- committer_info_ldap = ldap_data['committers']
+ committer_info = info_data["committers"]
+ repo_info = info_data["repositories"]
+ committer_info_ldap = ldap_data["committers"]
readldap(id, ldap_file, committer_info, committer_info_ldap, repo, repo_info)
def readldap(id, ldap_file, committer_info, committer_info_ldap, repo, repo_info):
for idx, val in enumerate(committer_info):
- committer = info_data['committers'][idx]['id']
+ committer = info_data["committers"][idx]["id"]
if committer == id:
- print('{} is alread in {}'.format(id, info_file))
+ print("{} is alread in {}".format(id, info_file))
exit()
for idx, val in enumerate(committer_info_ldap):
- committer = ldap_data['committers'][idx]['id']
+ committer = ldap_data["committers"][idx]["id"]
if committer == id:
- name = (ldap_data['committers'][idx]['name'])
- email = (ldap_data['committers'][idx]['email'])
- formatid = (ldap_data['committers'][idx]['id'])
- company = (ldap_data['committers'][idx]['company'])
- timezone = (ldap_data['committers'][idx]['timezone'])
+ name = ldap_data["committers"][idx]["name"]
+ email = ldap_data["committers"][idx]["email"]
+ formatid = ldap_data["committers"][idx]["id"]
+ company = ldap_data["committers"][idx]["company"]
+ timezone = ldap_data["committers"][idx]["timezone"]
try:
name
except NameError:
- print('{} does not exist in {}'.format(id, ldap_file))
+ print("{} does not exist in {}".format(id, ldap_file))
exit()
user = ruamel.yaml.comments.CommentedMap(
- (
- ('name', name), ('company', company), ('email', email), ('id', formatid), ('timezone', timezone)
- )
+ (("name", name), ("company", company), ("email", email), ("id", formatid), ("timezone", timezone))
)
- info_data['repositories'][0] = repo
+ info_data["repositories"][0] = repo
committer_info.append(user)
- with open(info_file, 'w') as f:
+ with open(info_file, "w") as f:
ryaml.dump(info_data, f)
readfile(info_data, ldap_data, id)
-@click.command(name='check-votes')
-@click.argument('info_file')
-@click.argument('endpoint', type=str)
-@click.argument('change_number', type=int)
-@click.option('--tsc', type=str, required=False,
- help='path to TSC INFO file')
-@click.option('--github_repo', type=str, required=False,
- help='Provide github repo to Check against a Github Change')
+@click.command(name="check-votes")
+@click.argument("info_file")
+@click.argument("endpoint", type=str)
+@click.argument("change_number", type=int)
+@click.option("--tsc", type=str, required=False, help="path to TSC INFO file")
+@click.option("--github_repo", type=str, required=False, help="Provide github repo to Check against a Github Change")
@click.pass_context
def check_votes(ctx, info_file, endpoint, change_number, tsc, github_repo):
"""Check votes on an INFO.yaml change.
lftools infofile check-votes ~/lf/allrepos/onosfw/INFO.yaml https://gerrit.opnfv.org/gerrit/ 67302
"""
+
def main(ctx, info_file, endpoint, change_number, tsc, github_repo, majority_of_committers):
"""Function so we can iterate into TSC members after commiter vote has happend."""
with open(info_file) as file:
except yaml.YAMLError as exc:
log.error(exc)
- committer_info = info_data['committers']
+ committer_info = info_data["committers"]
info_committers = []
info_change = []
if github_repo:
- id = 'github_id'
+ id = "github_id"
githubvotes = prvotes(endpoint, github_repo, change_number)
for vote in githubvotes:
info_change.append(vote)
else:
- id = 'id'
+ id = "id"
rest = GerritRestAPI(url=endpoint)
changes = rest.get("changes/{}/reviewers".format(change_number))
for change in changes:
- line = (change['username'], change['approvals']['Code-Review'])
- if '+1' in line[1] or '+2' in line[1]:
- info_change.append(change['username'])
+ line = (change["username"], change["approvals"]["Code-Review"])
+ if "+1" in line[1] or "+2" in line[1]:
+ info_change.append(change["username"])
for count, item in enumerate(committer_info):
committer = committer_info[count][id]
info_committers.append(committer)
have_not_voted = [item for item in info_committers if item not in info_change]
- have_not_voted_length = (len(have_not_voted))
+ have_not_voted_length = len(have_not_voted)
have_voted = [item for item in info_committers if item in info_change]
- have_voted_length = (len(have_voted))
+ have_voted_length = len(have_voted)
log.info("Number of Committers:")
log.info(len(info_committers))
- committer_lenght = (len(info_committers))
+    committer_length = len(info_committers)
log.info("Committers that have voted:")
log.info(have_voted)
log.info(have_voted_length)
log.info(have_not_voted)
log.info(have_not_voted_length)
- if (have_voted_length == 0):
+ if have_voted_length == 0:
log.info("No one has voted:")
sys.exit(1)
- if (have_voted_length != 0):
- majority = (committer_lenght / have_voted_length)
- if (majority >= 1):
+ if have_voted_length != 0:
+        majority = committer_length / have_voted_length
+ if majority >= 1:
log.info("Majority committer vote reached")
- if (tsc):
+ if tsc:
log.info("Need majority of tsc")
info_file = tsc
majority_of_committers += 1
if majority_of_committers == 2:
log.info("TSC majority reached auto merging commit")
else:
- main(ctx, info_file, endpoint, change_number, tsc, github_repo, majority_of_committers)
+ main(ctx, info_file, endpoint, change_number, tsc, github_repo, majority_of_committers)
else:
log.info("majority not yet reached")
sys.exit(1)
+
majority_of_committers = 0
main(ctx, info_file, endpoint, change_number, tsc, github_repo, majority_of_committers)
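The vote check above reduces to list bookkeeping over committer IDs; the same logic in isolation, with made-up IDs:
committers = ["alice", "bob", "carol"]
voted = ["alice", "bob"]  # reviewers who left +1/+2
have_voted = [c for c in committers if c in voted]
have_not_voted = [c for c in committers if c not in voted]
print(len(have_voted), have_not_voted)  # 2 ['carol']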
##############################################################################
"""Jenkins information."""
-__author__ = 'Trevor Bramwell'
+__author__ = "Trevor Bramwell"
import logging
@click.group()
+@click.option("-c", "--conf", type=str, default=None, help="Path to jenkins_jobs.ini config.")
@click.option(
- '-c', '--conf', type=str, default=None,
- help='Path to jenkins_jobs.ini config.')
-@click.option(
- '-s', '--server', type=str, envvar='JENKINS_URL', default='jenkins',
- help='The URL to a Jenkins server. Alternatively the jenkins_jobs.ini '
- 'section to parse for url/user/password configuration if available.')
-@click.option('-u', '--user', type=str, envvar='JENKINS_USER', default='admin')
-@click.option('-p', '--password', type=str, envvar='JENKINS_PASSWORD')
+ "-s",
+ "--server",
+ type=str,
+ envvar="JENKINS_URL",
+ default="jenkins",
+ help="The URL to a Jenkins server. Alternatively the jenkins_jobs.ini "
+ "section to parse for url/user/password configuration if available.",
+)
+@click.option("-u", "--user", type=str, envvar="JENKINS_USER", default="admin")
+@click.option("-p", "--password", type=str, envvar="JENKINS_PASSWORD")
@click.pass_context
def jenkins_cli(ctx, server, user, password, conf):
"""Query information about the Jenkins Server."""
# Initial the Jenkins object and pass it to sub-commands
- ctx.obj['jenkins'] = Jenkins(server, user, password, config_file=conf)
+ ctx.obj["jenkins"] = Jenkins(server, user, password, config_file=conf)
@click.command()
@click.pass_context
def get_credentials(ctx):
"""Print all available Credentials."""
- jenkins = ctx.obj['jenkins']
+ jenkins = ctx.obj["jenkins"]
groovy_script = """
import com.cloudbees.plugins.credentials.*
@click.command()
-@click.argument('groovy_file')
+@click.argument("groovy_file")
@click.pass_context
def groovy(ctx, groovy_file):
"""Run a groovy script."""
- with open(groovy_file, 'r') as f:
+ with open(groovy_file, "r") as f:
data = f.read()
- jenkins = ctx.obj['jenkins']
+ jenkins = ctx.obj["jenkins"]
result = jenkins.server.run_script(data)
log.info(result)
@click.pass_context
def quiet_down(ctx, n):
"""Put Jenkins into 'Quiet Down' mode."""
- jenkins = ctx.obj['jenkins']
+ jenkins = ctx.obj["jenkins"]
version = jenkins.server.get_version()
# Ask permission first
if n:
jenkins.server.quiet_down()
except HTTPError as m:
if m.code == 405:
- log.error("\n[%s]\nJenkins %s does not support Quiet Down "
- "without a CSRF Token. (CVE-2017-04-26)\nPlease "
- "file a bug with 'python-jenkins'" % (m, version))
+ log.error(
+ "\n[%s]\nJenkins %s does not support Quiet Down "
+ "without a CSRF Token. (CVE-2017-04-26)\nPlease "
+ "file a bug with 'python-jenkins'" % (m, version)
+ )
else:
raise m
@click.command()
@click.option(
- '--force', is_flag=True, default=False,
- help='Forcibly remove nodes, use only if the non-force version fails.')
+ "--force", is_flag=True, default=False, help="Forcibly remove nodes, use only if the non-force version fails."
+)
@click.pass_context
def remove_offline_nodes(ctx, force):
"""Remove any offline nodes."""
- jenkins = ctx.obj['jenkins']
+ jenkins = ctx.obj["jenkins"]
groovy_script = """
import hudson.model.*
log.info(result)
-jenkins_cli.add_command(plugins_init, name='plugins')
+jenkins_cli.add_command(plugins_init, name="plugins")
jenkins_cli.add_command(nodes)
jenkins_cli.add_command(builds)
-jenkins_cli.add_command(get_credentials, name='get-credentials')
+jenkins_cli.add_command(get_credentials, name="get-credentials")
jenkins_cli.add_command(groovy)
jenkins_cli.add_command(jobs)
-jenkins_cli.add_command(quiet_down, name='quiet-down')
-jenkins_cli.add_command(remove_offline_nodes, name='remove-offline-nodes')
+jenkins_cli.add_command(quiet_down, name="quiet-down")
+jenkins_cli.add_command(remove_offline_nodes, name="remove-offline-nodes")
jenkins_cli.add_command(token)
##############################################################################
"""Jenkins build information."""
-__author__ = 'Trevor Bramwell'
+__author__ = "Trevor Bramwell"
import click
@click.pass_context
def running(ctx):
"""Show all the currently running builds."""
- jenkins = ctx.obj['jenkins']
+ jenkins = ctx.obj["jenkins"]
running_builds = jenkins.server.get_running_builds()
for build in running_builds:
- print("- %s on %s" % (build['name'], build['node']))
+ print("- %s on %s" % (build["name"], build["node"]))
@click.command()
@click.pass_context
def queued(ctx):
"""Show all jobs waiting in the queue and their status."""
- jenkins = ctx.obj['jenkins']
+ jenkins = ctx.obj["jenkins"]
queue = jenkins.server.get_queue_info()
queue_length = len(queue)
print("Build Queue (%s)" % queue_length)
for build in queue:
- print(" - %s" % (build['task']['name'])),
- if build['stuck']:
+ print(" - %s" % (build["task"]["name"])),
+ if build["stuck"]:
print("[Stuck]")
- if build['blocked']:
+ if build["blocked"]:
print("[Blocked]")
##############################################################################
"""Jenkins Jobs."""
-__author__ = 'Anil Belur'
+__author__ = "Anil Belur"
import click
@click.command()
-@click.argument('regex')
+@click.argument("regex")
@click.pass_context
def enable(ctx, regex):
"""Enable all Jenkins jobs matching REGEX."""
- jenkins = ctx.obj['jenkins']
+ jenkins = ctx.obj["jenkins"]
result = jenkins.server.run_script(enable_disable_jobs.format(regex, "enable"))
print(result)
@click.command()
-@click.argument('regex')
+@click.argument("regex")
@click.pass_context
def disable(ctx, regex):
"""Disable all Jenkins jobs matching REGEX."""
- jenkins = ctx.obj['jenkins']
+ jenkins = ctx.obj["jenkins"]
result = jenkins.server.run_script(enable_disable_jobs.format(regex, "disable"))
print(result)
##############################################################################
"""Jenkins node information."""
-__author__ = 'Trevor Bramwell'
+__author__ = "Trevor Bramwell"
import click
@click.pass_context
def nodes(ctx):
"""Find information about builders connected to Jenkins Master."""
- jenkins = ctx.obj['jenkins']
- ctx.obj['nodes'] = jenkins.server.get_nodes()
+ jenkins = ctx.obj["jenkins"]
+ ctx.obj["nodes"] = jenkins.server.get_nodes()
@click.command()
@click.pass_context
def list_nodes(ctx):
"""List Jenkins nodes."""
- node_list = ctx.obj['nodes']
+ node_list = ctx.obj["nodes"]
for node in node_list:
- print("%s [%s]" % (node['name'], offline_str(node['offline'])))
+ print("%s [%s]" % (node["name"], offline_str(node["offline"])))
-nodes.add_command(list_nodes, name='list')
+nodes.add_command(list_nodes, name="list")
##############################################################################
"""Jenkins plugin information."""
-__author__ = 'Trevor Bramwell'
+__author__ = "Trevor Bramwell"
import click
import requests
def checkmark(truthy):
"""Return a UTF-8 Checkmark or Cross depending on the truthiness of the argument."""
if truthy:
- return u'\u2713'
- return u'\u2717'
+ return "\u2713"
+ return "\u2717"
-def print_plugin(plugin, namefield='longName'):
+def print_plugin(plugin, namefield="longName"):
"""Print the plugin longName and version."""
- print("%s:%s" % (plugin[namefield], plugin['version']))
+ print("%s:%s" % (plugin[namefield], plugin["version"]))
@click.group()
@click.pass_context
def plugins_init(ctx):
"""Inspect Jenkins plugins on the server."""
- jenkins = ctx.obj['jenkins']
- ctx.obj['plugins'] = jenkins.server.get_plugins()
+ jenkins = ctx.obj["jenkins"]
+ ctx.obj["plugins"] = jenkins.server.get_plugins()
@click.command()
Defaults to listing all installed plugins and their current versions
"""
- plugins = ctx.obj['plugins']
+ plugins = ctx.obj["plugins"]
for key in plugins.keys():
_, plugin_name = key
plugin = plugins[plugin_name]
@click.pass_context
def pinned(ctx):
"""List pinned plugins."""
- plugins = ctx.obj['plugins']
+ plugins = ctx.obj["plugins"]
for key in plugins.keys():
_, plugin_name = key
plugin = plugins[plugin_name]
- if plugin['pinned']:
+ if plugin["pinned"]:
print_plugin(plugin)
@click.pass_context
def dynamic(ctx):
"""List dynamically reloadable plugins."""
- plugins = ctx.obj['plugins']
+ plugins = ctx.obj["plugins"]
for key in plugins.keys():
_, plugin_name = key
plugin = plugins[plugin_name]
- if plugin['supportsDynamicLoad'] == "YES":
+ if plugin["supportsDynamicLoad"] == "YES":
print_plugin(plugin)
@click.pass_context
def needs_update(ctx):
"""List pending plugin updates."""
- plugins = ctx.obj['plugins']
+ plugins = ctx.obj["plugins"]
for key in plugins.keys():
_, plugin_name = key
plugin = plugins[plugin_name]
- if plugin['hasUpdate']:
+ if plugin["hasUpdate"]:
print_plugin(plugin)
@click.pass_context
def enabled(ctx):
"""List enabled plugins."""
- plugins = ctx.obj['plugins']
+ plugins = ctx.obj["plugins"]
for key in plugins.keys():
_, plugin_name = key
plugin = plugins[plugin_name]
- if plugin['enabled']:
+ if plugin["enabled"]:
print_plugin(plugin)
TODO: In the future this should be part of a command alias and pass a flag
to 'enabled' so that we don't duplicate code.
"""
- plugins = ctx.obj['plugins']
+ plugins = ctx.obj["plugins"]
for key in plugins.keys():
_, plugin_name = key
plugin = plugins[plugin_name]
- if not plugin['enabled']:
+ if not plugin["enabled"]:
print_plugin(plugin)
@click.pass_context
def active(ctx):
"""List active plugins."""
- plugins = ctx.obj['plugins']
+ plugins = ctx.obj["plugins"]
for key in plugins.keys():
_, plugin_name = key
plugin = plugins[plugin_name]
- if plugin['active']:
+ if plugin["active"]:
print_plugin(plugin)
Vulnerable Version\t Installed Version\t Link.
"""
- r = requests.get('http://updates.jenkins-ci.org/update-center.actual.json')
- warn = r.json()['warnings']
+ r = requests.get("http://updates.jenkins-ci.org/update-center.actual.json")
+ warn = r.json()["warnings"]
# create a dict of relevant info from jenkins update center
secdict = {}
for w in warn:
- name = (w['name'])
- url = (w['url'])
- for version in w['versions']:
- lastversion = version.get('lastVersion')
+ name = w["name"]
+ url = w["url"]
+ for version in w["versions"]:
+ lastversion = version.get("lastVersion")
nv = {name: lastversion}
secdict.update(nv)
# create a dict of our active plugins
activedict = {}
- plugins = ctx.obj['plugins']
+ plugins = ctx.obj["plugins"]
for key in plugins.keys():
_, plugin_name = key
plugin = plugins[plugin_name]
- if plugin['active']:
- name = plugin['shortName']
- version = plugin['version']
+ if plugin["active"]:
+ name = plugin["shortName"]
+ version = plugin["version"]
nv = {name: version}
activedict.update(nv)
shared = []
for key in set(secdict.keys()) & set(activedict.keys()):
shared.append(key)
- ourversion = (activedict[key])
- theirversion = (secdict[key])
+ ourversion = activedict[key]
+ theirversion = secdict[key]
t1 = tuple([ourversion])
t2 = tuple([theirversion])
if (t1) <= (t2):
# Print Vulnerable Version\t Installed Version\t Link
for w in warn:
- name = (w['name'])
- url = (w['url'])
- for version in w['versions']:
- lastversion = version.get('lastVersion')
+ name = w["name"]
+ url = w["url"]
+ for version in w["versions"]:
+ lastversion = version.get("lastVersion")
if name == key and secdict[key] == lastversion:
print("{0}:{1}\t{0}:{2}\t{3}".format(key, secdict[key], activedict[key], url))
-plugins_init.add_command(list_plugins, name='list')
+plugins_init.add_command(list_plugins, name="list")
plugins_init.add_command(pinned)
plugins_init.add_command(dynamic)
-plugins_init.add_command(needs_update, name='needs-update')
+plugins_init.add_command(needs_update, name="needs-update")
plugins_init.add_command(active)
plugins_init.add_command(enabled)
plugins_init.add_command(disabled)
##############################################################################
"""Jenkins token commands."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
import logging
import os
@click.command()
-@click.option('--name', type=str, default="token-created-by-lftools",
- help='set token name')
+@click.option("--name", type=str, default="token-created-by-lftools", help="set token name")
@click.pass_context
def change(ctx, name):
"""Generate a new API token."""
- jenkins = ctx.obj['jenkins']
- username = ctx.obj['username']
- password = ctx.obj['password']
+ jenkins = ctx.obj["jenkins"]
+ username = ctx.obj["username"]
+ password = ctx.obj["password"]
if not username or not password:
- log.error('Username or password not set.')
+ log.error("Username or password not set.")
sys.exit(1)
- log.info(get_token(name, jenkins.url, change=True,
- username=username, password=password))
+ log.info(get_token(name, jenkins.url, change=True, username=username, password=password))
@click.command()
-@click.argument('name')
-@click.argument('url')
+@click.argument("name")
+@click.argument("url")
@click.pass_context
def init(ctx, name, url):
"""Initialize jenkins_jobs.ini config for new server section."""
- jenkins = ctx.obj['jenkins']
- username = ctx.obj['username']
- password = ctx.obj['password']
+ jenkins = ctx.obj["jenkins"]
+ username = ctx.obj["username"]
+ password = ctx.obj["password"]
if not username or not password:
- log.error('Username or password not set.')
+ log.error("Username or password not set.")
sys.exit(1)
_require_jjb_ini(jenkins.config_file)
config = configparser.ConfigParser()
config.read(jenkins.config_file)
- token = get_token(url, change=True,
- username=username, password=password)
+ token = get_token(url, change=True, username=username, password=password)
try:
config.add_section(name)
except configparser.DuplicateSectionError as e:
log.error(e)
sys.exit(1)
- config.set(name, 'url', url)
- config.set(name, 'user', lftools_cfg.get_setting('global', 'username'))
- config.set(name, 'password', token)
+ config.set(name, "url", url)
+ config.set(name, "user", lftools_cfg.get_setting("global", "username"))
+ config.set(name, "password", token)
- with open(jenkins.config_file, 'w') as configfile:
+ with open(jenkins.config_file, "w") as configfile:
config.write(configfile)
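For reference, the section written above lands in jenkins_jobs.ini looking roughly like this (section name, URL, and user are placeholders):
[example-jenkins]
url = https://jenkins.example.org
user = example-user
password = <token returned by get_token>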
-@click.command(name='print')
+@click.command(name="print")
@click.pass_context
def print_token(ctx):
"""Print current API token."""
- jenkins = ctx.obj['jenkins']
- username = ctx.obj['username']
- password = ctx.obj['password']
+ jenkins = ctx.obj["jenkins"]
+ username = ctx.obj["username"]
+ password = ctx.obj["password"]
if not username or not password:
- log.error('Username or password not set.')
+ log.error("Username or password not set.")
sys.exit(1)
log.info(get_token(jenkins.url, username=username, password=password))
@click.command()
-@click.argument('servers', nargs=-1, required=False)
+@click.argument("servers", nargs=-1, required=False)
@click.pass_context
def reset(ctx, servers):
"""Regenerate API tokens for configurations in jenkins_jobs.ini.
If the server parameter is NOT passed then all servers listed in the
configuration file will be reset via multi-server mode.
"""
- jenkins = ctx.obj['jenkins']
- username = ctx.obj['username']
- password = ctx.obj['password']
+ jenkins = ctx.obj["jenkins"]
+ username = ctx.obj["username"]
+ password = ctx.obj["password"]
if not username or not password:
- log.error('Username or password not set.')
+ log.error("Username or password not set.")
sys.exit(1)
_require_jjb_ini(jenkins.config_file)
def _reset_key(config, server):
- url = config.get(server, 'url')
+ url = config.get(server, "url")
try:
- token = get_token(url, change=True,
- username=username, password=password)
- config.set(server, 'password', token)
- with open(jenkins.config_file, 'w') as configfile:
+ token = get_token(url, change=True, username=username, password=password)
+ config.set(server, "password", token)
+ with open(jenkins.config_file, "w") as configfile:
config.write(configfile)
return token
except requests.exceptions.ConnectionError as e:
cfg_sections = list(servers)
for section in cfg_sections:
- if not config.has_option(section, 'url'):
- log.debug('Section does not contain a url, skipping...')
+ if not config.has_option(section, "url"):
+ log.debug("Section does not contain a url, skipping...")
continue
- log.info('Resetting API key for {}'.format(section))
+ log.info("Resetting API key for {}".format(section))
if _reset_key(config, section):
success += 1
else:
fail += 1
- log.error('Failed to reset API key for {}'.format(section))
+ log.error("Failed to reset API key for {}".format(section))
- log.info('Update configurations complete.')
- log.info('Success: {}'.format(success))
- log.info('Failed: {}'.format(fail))
+ log.info("Update configurations complete.")
+ log.info("Success: {}".format(success))
+ log.info("Failed: {}".format(fail))
token.add_command(change)
def _require_jjb_ini(config):
if not os.path.isfile(config):
- log.error('jenkins_jobs.ini not found in any of the search paths. '
- 'Please provide one before proceeding.')
+ log.error("jenkins_jobs.ini not found in any of the search paths. " "Please provide one before proceeding.")
sys.exit(1)
@click.command()
-@click.argument('group')
+@click.argument("group")
@click.pass_context
def yaml4info(ctx, group):
"""Build yaml of committers for your INFO.yaml."""
- status = subprocess.call(['yaml4info', group])
+ status = subprocess.call(["yaml4info", group])
sys.exit(status)
@click.command()
-@click.argument('gerrit_url')
-@click.argument('group')
+@click.argument("gerrit_url")
+@click.argument("group")
@click.pass_context
def inactivecommitters(ctx, gerrit_url, group):
"""Check committer participation."""
- status = subprocess.call(['inactivecommitters', gerrit_url, group])
+ status = subprocess.call(["inactivecommitters", gerrit_url, group])
sys.exit(status)
@click.command()
-@click.argument('gerrit_clone_base')
-@click.argument('ldap_group')
-@click.argument('repo')
-@click.option('--purpose', envvar='purpose', type=str,
- help='Must be one of READY_FOR_INFO LINT IN-REVIEW')
-@click.option('--review', type=str, required=False,
- help='review number in gerrit, required if purpose is IN-REVIEW')
+@click.argument("gerrit_clone_base")
+@click.argument("ldap_group")
+@click.argument("repo")
+@click.option("--purpose", envvar="purpose", type=str, help="Must be one of READY_FOR_INFO LINT IN-REVIEW")
+@click.option("--review", type=str, required=False, help="review number in gerrit, required if purpose is IN-REVIEW")
@click.pass_context
def autocorrectinfofile(ctx, gerrit_clone_base, ldap_group, repo, purpose, review):
"""Verify INFO.yaml against LDAP group.\n
PURPOSE must be one of: READY_FOR_INFO LINT IN-REVIEW\n
GERRITCLONEBASE must be a url: https://gerrit.opnfv.org/gerrit/\n
"""
- params = ['autocorrectinfofile']
+ params = ["autocorrectinfofile"]
params.extend([gerrit_clone_base, ldap_group, repo])
if purpose:
params.extend([purpose])
@click.command()
-@click.option('--ldap-server', default='ldaps://pdx-wl-lb-lfldap.web.codeaurora.org',
- envvar='LDAP_SERVER', type=str, required=True)
-@click.option('--ldap-user-base', default='ou=Users,dc=freestandards,dc=org',
- envvar='LDAP_USER_BASE_DN', type=str, required=True)
-@click.option('--ldap-group-base', default='ou=Groups,dc=freestandards,dc=org',
- envvar='LDAP_GROUP_BASE_DN', type=str, required=True)
-@click.argument('groups')
+@click.option(
+ "--ldap-server",
+ default="ldaps://pdx-wl-lb-lfldap.web.codeaurora.org",
+ envvar="LDAP_SERVER",
+ type=str,
+ required=True,
+)
+@click.option(
+ "--ldap-user-base", default="ou=Users,dc=freestandards,dc=org", envvar="LDAP_USER_BASE_DN", type=str, required=True
+)
+@click.option(
+ "--ldap-group-base",
+ default="ou=Groups,dc=freestandards,dc=org",
+ envvar="LDAP_GROUP_BASE_DN",
+ type=str,
+ required=True,
+)
+@click.argument("groups")
@click.pass_context
def csv(ctx, ldap_server, ldap_group_base, ldap_user_base, groups):
"""Query an Ldap server."""
# groups needs to be a list
- groups = groups.split(' ')
+ groups = groups.split(" ")
def ldap_connect(ldap_object):
"""Start the connection to LDAP."""
ldap_object.protocol_version = ldap.VERSION3
ldap_object.simple_bind_s()
except ldap.LDAPError as e:
- if type(e.message) == dict and e.message.has_key('desc'):
- print(e.message['desc'])
+        if isinstance(e.message, dict) and "desc" in e.message:
+            print(e.message["desc"])
else:
print(e)
sys.exit(0)
result_set = []
while 1:
result_type, result_data = ldap_object.result(ldap_result_id, 0)
- if (result_data == []):
+ if result_data == []:
break
else:
# if you are expecting multiple results you can append them
containing the groups member uids.
"""
group_list = []
- cut_length = len(ldap_user_base)+1
+ cut_length = len(ldap_user_base) + 1
for group in groups:
group_d = dict(name=group[0][0])
members = []
for group_attrs in group:
- for member in group_attrs[1]['member']:
+ for member in group_attrs[1]["member"]:
members.append(member[:-cut_length])
- group_d['members'] = members
+ group_d["members"] = members
group_list.append(group_d)
return group_list
def user_to_csv(user):
"""Covert LDIF user info to CSV of uid,mail,cn."""
- attrs = (user[0][0][1])
- return ",".join([attrs['uid'][0].decode('utf-8'), attrs['cn'][0].decode('utf-8'), attrs['mail'][0].decode('utf-8')])
+ attrs = user[0][0][1]
+ return ",".join(
+ [attrs["uid"][0].decode("utf-8"), attrs["cn"][0].decode("utf-8"), attrs["mail"][0].decode("utf-8")]
+ )
def main(groups):
"""Preform an LDAP query."""
for arg in groups:
groups = ldap_query(l, ldap_group_base, "cn=%s" % arg, ["member"])
group_dict = package_groups(groups)
- cut_length = len(ldap_group_base)+1
+ cut_length = len(ldap_group_base) + 1
for group_bar in group_dict:
- group_name = group_bar['name'][3:-cut_length]
- for user in group_bar['members']:
- user = (user.decode('utf-8'))
+ group_name = group_bar["name"][3:-cut_length]
+ for user in group_bar["members"]:
+ user = user.decode("utf-8")
user_info = ldap_query(l, ldap_user_base, user, ["uid", "cn", "mail"])
try:
print("%s,%s" % (group_name, user_to_csv(user_info)))
eprint("Error parsing user: %s" % user)
continue
ldap_disconnect(l)
+
main(groups)
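The member[:-cut_length] trimming in package_groups above strips the user base DN plus its separating comma from each member entry; a worked example with a hypothetical DN:
ldap_user_base = "ou=Users,dc=freestandards,dc=org"
member = b"uid=jdoe,ou=Users,dc=freestandards,dc=org"
cut_length = len(ldap_user_base) + 1  # +1 for the comma
print(member[:-cut_length])  # b'uid=jdoe'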
@click.command()
-@click.argument('group')
+@click.argument("group")
@click.pass_context
def search_members(ctx, group):
"""List members of a group."""
members = helper_search_members(group)
for member in members:
- log.info('%s <%s>' % (member['username'], member['mail']))
+ log.info("%s <%s>" % (member["username"], member["mail"]))
@click.command()
-@click.argument('user')
-@click.option('--delete', is_flag=True, required=False,
- help='remove user from group')
-@click.argument('group')
+@click.argument("user")
+@click.option("--delete", is_flag=True, required=False, help="remove user from group")
+@click.argument("group")
@click.pass_context
def user(ctx, user, group, delete):
"""Add and remove users from groups."""
@click.command()
-@click.argument('email')
-@click.argument('group')
+@click.argument("email")
+@click.argument("group")
@click.pass_context
def invite(ctx, email, group):
"""Email invitation to join group."""
@click.command()
-@click.argument('group')
+@click.argument("group")
@click.pass_context
def create_group(ctx, group):
"""Create group."""
@click.command()
-@click.argument('info_file')
-@click.argument('group')
-@click.option('--githuborg', type=str, required=False,
- help='github org name')
-@click.option('--noop', is_flag=True, required=False,
- help='show what would be changed')
+@click.argument("info_file")
+@click.argument("group")
+@click.option("--githuborg", type=str, required=False, help="github org name")
+@click.option("--noop", is_flag=True, required=False, help="show what would be changed")
@click.pass_context
def match_ldap_to_info(ctx, info_file, group, githuborg, noop):
"""Match an LDAP or GITHUB group membership to an INFO.yaml file."""
##############################################################################
"""Scan code for license headers."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
import sys
@click.command()
-@click.argument('source')
-@click.option('-l', '--license', default='license-header.txt',
- help='License header file to compare against.')
+@click.argument("source")
+@click.option("-l", "--license", default="license-header.txt", help="License header file to compare against.")
@click.pass_context
def check(ctx, license, source):
"""Check files for missing license headers.
sys.exit(exit_code)
-@click.command(name='check-dir')
-@click.argument('directory')
-@click.option('-l', '--license', default='license-header.txt',
- help='License header file to compare against.')
-@click.option('-r', '--regex', default='.+\.py$',
- help='File regex pattern to match on when searching.')
+@click.command(name="check-dir")
+@click.argument("directory")
+@click.option("-l", "--license", default="license-header.txt", help="License header file to compare against.")
+@click.option("-r", "--regex", default=".+\.py$", help="File regex pattern to match on when searching.")
@click.pass_context
def check_directory(ctx, license, directory, regex):
"""Check directory for files missing license headers.
from lftools.nexus import cmd as nexuscmd
import lftools.nexus.release_docker_hub as rdh
-NEXUS_URL_ENV = 'NEXUS_URL'
+NEXUS_URL_ENV = "NEXUS_URL"
@click.group()
@click.command()
-@click.option('-s', '--settings', type=str, required=True)
+@click.option("-s", "--settings", type=str, required=True)
@click.pass_context
def reorder_staged_repos(ctx, settings):
"""Reorder staging repositories in Nexus.
@create.command()
@click.option(
- '-c', '--config', type=str, required=True,
- help='Repo config file for how the Nexus repository should be created.')
-@click.option(
- '-s', '--settings', type=str, required=True,
- help='Config file containing administrative settings.')
+ "-c", "--config", type=str, required=True, help="Repo config file for how the Nexus repository should be created."
+)
+@click.option("-s", "--settings", type=str, required=True, help="Config file containing administrative settings.")
@click.pass_context
def repo(ctx, config, settings):
"""Create a Nexus repository as defined by a repo-config.yaml file."""
@create.command()
@click.option(
- '-c', '--config', type=str, required=True,
- help='Role config file for how the Nexus role should be created.')
-@click.option(
- '-s', '--settings', type=str, required=True,
- help='Config file containing administrative settings.')
+ "-c", "--config", type=str, required=True, help="Role config file for how the Nexus role should be created."
+)
+@click.option("-s", "--settings", type=str, required=True, help="Config file containing administrative settings.")
@click.pass_context
def role(ctx, config, settings):
"""Create a Nexus role as defined by a role-config.yaml file."""
def docker_params(command):
"""Common options and arguments for all docker subcommands."""
command = click.option(
- '--settings', type=str,
- help=('Yaml file containing "nexus" (url), "user", and "password" '
- 'definitions.'))(command)
+ "--settings", type=str, help=('Yaml file containing "nexus" (url), "user", and "password" ' "definitions.")
+ )(command)
command = click.option(
- '-s', '--server', type=str,
- help=('Nexus server URL. Can also be set as {} in the environment. '
- 'This will override any URL set in settings.yaml.').format(
- NEXUS_URL_ENV))(command)
- command = click.argument('REPO', type=str)(command)
- command = click.argument('PATTERN', type=str, default="*")(command)
+ "-s",
+ "--server",
+ type=str,
+ help=(
+ "Nexus server URL. Can also be set as {} in the environment. "
+ "This will override any URL set in settings.yaml."
+ ).format(NEXUS_URL_ENV),
+ )(command)
+ command = click.argument("REPO", type=str)(command)
+ command = click.argument("PATTERN", type=str, default="*")(command)
return command
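docker_params is a decorator stack: applying it gives any docker subcommand the --settings and --server options plus the REPO and PATTERN arguments. A minimal hypothetical subcommand built with it (the "count" command does not exist in lftools):
@docker.command(name="count")
@docker_params
@click.pass_context
def count_images(ctx, settings, server, repo, pattern):
    """Print how many images match PATTERN (illustrative only)."""
    print(len(nexuscmd.search(settings, server, repo, pattern)))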
@docker.command(name="list")
@docker_params
@click.option(
- '--csv', type=click.Path(dir_okay=False, writable=True),
- help='Write a csv file of the search results to PATH.')
+ "--csv", type=click.Path(dir_okay=False, writable=True), help="Write a csv file of the search results to PATH."
+)
@click.pass_context
def list_images(ctx, settings, server, repo, pattern, csv):
"""List images matching the PATTERN.
@docker.command(name="delete")
@docker_params
-@click.option(
- '-y', '--yes', is_flag=True, help="Answer yes to all prompts")
+@click.option("-y", "--yes", is_flag=True, help="Answer yes to all prompts")
@click.pass_context
def delete_images(ctx, settings, server, repo, pattern, yes):
"""Delete all images matching the PATTERN.
to delete images NOT matching the string.
"""
images = nexuscmd.search(settings, server, repo, pattern)
- if yes or click.confirm("Would you like to delete all {} images?".format(
- str(len(images)))):
+ if yes or click.confirm("Would you like to delete all {} images?".format(str(len(images)))):
nexuscmd.delete_images(settings, server, images)
@nexus.command()
@click.pass_context
-@click.argument('REPOS', type=str, nargs=-1)
-@click.option('-v', '--verify-only', 'verify', is_flag=True, required=False)
+@click.argument("REPOS", type=str, nargs=-1)
+@click.option("-v", "--verify-only", "verify", is_flag=True, required=False)
@click.option(
- '-s', '--server', type=str,
- help=('Nexus server URL. Can also be set as {} in the environment. '
- 'This will override any URL set in settings.yaml.').format(
- NEXUS_URL_ENV))
+ "-s",
+ "--server",
+ type=str,
+ help=(
+ "Nexus server URL. Can also be set as {} in the environment. "
+ "This will override any URL set in settings.yaml."
+ ).format(NEXUS_URL_ENV),
+)
def release(ctx, repos, verify, server):
"""Release one or more staging repositories."""
if not server and NEXUS_URL_ENV in environ:
@docker.command(name="releasedockerhub")
+@click.option("-o", "--org", type=str, required=True, help="Specify repository organization.")
@click.option(
- '-o', '--org', type=str, required=True,
- help='Specify repository organization.')
-@click.option(
- '-r', '--repo', type=str, default='', required=False,
- help='Only repos containing this string will be selected. '
- 'Default set to blank string, which is every repo.')
-@click.option(
- '-e', '--exact', is_flag=True, required=False, default=False,
- help='Match the exact repo name. '
- 'If used, --repo parameter can not be empty.')
-@click.option(
- '-s', '--summary', is_flag=True, required=False,
- help='Prints a summary of missing docker tags.')
+ "-r",
+ "--repo",
+ type=str,
+ default="",
+ required=False,
+ help="Only repos containing this string will be selected. " "Default set to blank string, which is every repo.",
+)
@click.option(
- '-v', '--verbose', is_flag=True, required=False,
- help='Prints all collected repo/tag information.')
+ "-e",
+ "--exact",
+ is_flag=True,
+ required=False,
+ default=False,
+ help="Match the exact repo name. " "If used, --repo parameter can not be empty.",
+)
+@click.option("-s", "--summary", is_flag=True, required=False, help="Prints a summary of missing docker tags.")
+@click.option("-v", "--verbose", is_flag=True, required=False, help="Prints all collected repo/tag information.")
@click.option(
- '-c', '--copy', is_flag=True, required=False, default=False,
- help='Copy missing tags from Nexus3 repos to Docker Hub repos.')
+ "-c",
+ "--copy",
+ is_flag=True,
+ required=False,
+ default=False,
+ help="Copy missing tags from Nexus3 repos to Docker Hub repos.",
+)
@click.option(
- '-p', '--progbar', is_flag=True, required=False, default=False,
- help='Display a progress bar for the time consuming jobs.')
+ "-p",
+ "--progbar",
+ is_flag=True,
+ required=False,
+ default=False,
+ help="Display a progress bar for the time consuming jobs.",
+)
@click.pass_context
def copy_from_nexus3_to_dockerhub(ctx, org, repo, exact, summary, verbose, copy, progbar):
"""Find missing repos in Docker Hub, Copy from Nexus3.
"""Nexus2 REST API sub-interfaces."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
from lftools.api.endpoints import nexus2
@click.group(name="nexus2")
-@click.argument('fqdn')
+@click.argument("fqdn")
@click.pass_context
def nexus_two(ctx, fqdn):
"""The Nexus2 API Interface."""
"""Nexus2 REST API user interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import logging
"""List privileges."""
r = ctx.obj["nexus2"]
data = r.privilege_list()
- log.info(
- tabulate(
- data,
- headers=[
- "Name",
- "ID"
- ]
- ))
+ log.info(tabulate(data, headers=["Name", "ID"]))
@privilege.command(name="create")
"""Nexus2 REST API repository interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import logging
"""List repositories."""
r = ctx.obj["nexus2"]
data = r.repo_list()
- log.info(
- tabulate(
- data,
- headers=[
- "Name",
- "Type",
- "Provider",
- "ID"
- ]
- ))
+ log.info(tabulate(data, headers=["Name", "Type", "Provider", "ID"]))
@repo.command(name="create")
@click.argument("repo_name")
@click.argument("repo_provider")
@click.argument("repo_policy")
-@click.option('-u', '--upstream-repo', 'repo_upstream_url')
+@click.option("-u", "--upstream-repo", "repo_upstream_url")
@click.pass_context
def create(ctx, repo_type, repo_id, repo_name, repo_provider, repo_policy, repo_upstream_url):
"""Create a new repository."""
"""Nexus2 REST API user interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import logging
"""List users."""
r = ctx.obj["nexus2"]
data = r.role_list()
- log.info(
- tabulate(
- data,
- headers=[
- "ID",
- "Name",
- "Roles",
- "Privileges"
- ],
- tablefmt="grid"
- ))
+ log.info(tabulate(data, headers=["ID", "Name", "Roles", "Privileges"], tablefmt="grid"))
@role.command(name="create")
@click.argument("role_id")
@click.argument("role_name")
-@click.option('-d', "role_description", required=False)
-@click.option('-r', "roles_list", required=False)
-@click.option('-p', "privileges_list", required=False)
+@click.option("-d", "role_description", required=False)
+@click.option("-r", "roles_list", required=False)
+@click.option("-p", "privileges_list", required=False)
@click.pass_context
def role_create(ctx, role_id, role_name, role_description, roles_list, privileges_list):
"""Create a new role."""
"""Nexus2 REST API user interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import logging
"""List users."""
r = ctx.obj["nexus2"]
data = r.user_list()
- log.info(
- tabulate(
- data,
- headers=[
- "ID",
- "First Name",
- "Last Name",
- "Status",
- "Roles"
- ]
- ))
+ log.info(tabulate(data, headers=["ID", "First Name", "Last Name", "Status", "Roles"]))
@user.command(name="add")
"""Nexus3 REST API sub-interfaces."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
from .asset import *
from .privilege import *
"""Nexus3 REST API asset interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import logging
from pprint import pformat
"""Nexus3 REST API privileges interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import logging
"""List privileges."""
r = ctx.obj["nexus3"]
data = r.list_privileges()
- log.info(
- tabulate(data, headers=["Type", "Name", "Description", "Read Only"])
- )
+ log.info(tabulate(data, headers=["Type", "Name", "Description", "Read Only"]))
"""Nexus3 REST API repository interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import logging
from pprint import pformat
"""Nexus3 REST API role interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import logging
from pprint import pformat
"""Nexus3 REST API script interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import logging
"""Nexus3 REST API tag interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import logging
from pprint import pformat
"""Nexus3 REST API task interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import logging
"""List tasks."""
r = ctx.obj["nexus3"]
data = r.list_tasks()
- log.info(
- tabulate(
- data,
- headers=["Name", "Message", "Current State", "Last Run Result"],
- )
- )
+ log.info(tabulate(data, headers=["Name", "Message", "Current State", "Last Run Result"],))
"""Nexus3 REST API user interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import logging
"""Search users."""
r = ctx.obj["nexus3"]
data = r.list_user(username)
- log.info(
- tabulate(
- data,
- headers=[
- "User ID",
- "First Name",
- "Last Name",
- "Email Address",
- "Status",
- "Roles",
- ],
- )
- )
+ log.info(tabulate(data, headers=["User ID", "First Name", "Last Name", "Email Address", "Status", "Roles",],))
##############################################################################
"""CLI configuration for ldap command."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
import click
"""Read the Docs interface."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import logging
pass
-@click.command(name='project-list')
+@click.command(name="project-list")
@click.pass_context
def project_list(ctx):
"""Get a list of Read the Docs projects.
log.info(project)
-@click.command(name='project-details')
-@click.argument('project-slug')
+@click.command(name="project-details")
+@click.argument("project-slug")
@click.pass_context
def project_details(ctx, project_slug):
"""Retrieve project details."""
log.info(pformat(data))
-@click.command(name='project-version-list')
-@click.argument('project-slug')
+@click.command(name="project-version-list")
+@click.argument("project-slug")
@click.pass_context
def project_version_list(ctx, project_slug):
"""Retrieve project version list."""
log.info(version)
-@click.command(name='project-version-update')
-@click.argument('project-slug')
-@click.argument('version-slug')
-@click.argument('active', type=click.BOOL)
+@click.command(name="project-version-update")
+@click.argument("project-slug")
+@click.argument("version-slug")
+@click.argument("active", type=click.BOOL)
@click.pass_context
def project_version_update(ctx, project_slug, version_slug, active):
"""Update projects active version.
log.info(data)
-@click.command(name='project-version-details')
-@click.argument('project-slug')
-@click.argument('version-slug')
+@click.command(name="project-version-details")
+@click.argument("project-slug")
+@click.argument("version-slug")
@click.pass_context
def project_version_details(ctx, project_slug, version_slug):
"""Retrieve project version details."""
log.info(data)
-@click.command(name='project-create')
-@click.argument('project-name')
-@click.argument('repository-url')
-@click.argument('repository-type')
-@click.argument('homepage')
-@click.argument('programming-language')
-@click.argument('language')
+@click.command(name="project-create")
+@click.argument("project-name")
+@click.argument("repository-url")
+@click.argument("repository-type")
+@click.argument("homepage")
+@click.argument("programming-language")
+@click.argument("language")
@click.pass_context
-def project_create(ctx, project_name, repository_url, repository_type,
- homepage, programming_language, language):
+def project_create(ctx, project_name, repository_url, repository_type, homepage, programming_language, language):
"""Create a new project."""
r = readthedocs.ReadTheDocs()
- data = r.project_create(project_name, repository_url, repository_type,
- homepage, programming_language, language)
+ data = r.project_create(project_name, repository_url, repository_type, homepage, programming_language, language)
log.info(pformat(data))
-@click.command(name='project-update',
- context_settings=dict(ignore_unknown_options=True,
- allow_extra_args=True,))
-@click.argument('project-name')
+@click.command(name="project-update", context_settings=dict(ignore_unknown_options=True, allow_extra_args=True,))
+@click.argument("project-name")
@click.pass_context
def project_update(ctx, project_name):
"""Create a new project."""
r = readthedocs.ReadTheDocs()
d = dict()
for item in ctx.args:
- d.update([item.split('=')])
+ d.update([item.split("=")])
data = r.project_update(project_name, d)
log.info(pformat(data))
-@click.command(name='project-build-list')
-@click.argument('project-slug')
+@click.command(name="project-build-list")
+@click.argument("project-slug")
@click.pass_context
def project_build_list(ctx, project_slug):
"""Retrieve a list of a project's builds."""
log.info(data)
-@click.command(name='project-build-details')
-@click.argument('project-slug')
-@click.argument('build-id')
+@click.command(name="project-build-details")
+@click.argument("project-slug")
+@click.argument("build-id")
@click.pass_context
def project_build_details(ctx, project_slug, build_id):
"""Retrieve specific project build details."""
log.info(data)
-@click.command(name='project-build-trigger')
-@click.argument('project-slug')
-@click.argument('version-slug')
+@click.command(name="project-build-trigger")
+@click.argument("project-slug")
+@click.argument("version-slug")
@click.pass_context
def project_build_trigger(ctx, project_slug, version_slug):
"""Trigger a new build."""
log.info(data)
-@click.command(name='subproject-list')
-@click.argument('project-slug')
+@click.command(name="subproject-list")
+@click.argument("project-slug")
@click.pass_context
def subproject_list(ctx, project_slug):
"""Get a list of Read the Docs subprojects for a project.
log.info(subproject)
-@click.command(name='subproject-details')
-@click.argument('project-slug')
-@click.argument('subproject-slug')
+@click.command(name="subproject-details")
+@click.argument("project-slug")
+@click.argument("subproject-slug")
@click.pass_context
def subproject_details(ctx, project_slug, subproject_slug):
"""Retrieve subproject's details."""
log.info(pformat(data))
-@click.command(name='subproject-create')
-@click.argument('project-slug')
-@click.argument('subproject-slug')
+@click.command(name="subproject-create")
+@click.argument("project-slug")
+@click.argument("subproject-slug")
@click.pass_context
def subproject_create(ctx, project_slug, subproject_slug):
"""Create a project-subproject relationship."""
log.info(pformat(data))
-@click.command(name='subproject-delete')
-@click.argument('project-slug')
-@click.argument('subproject-slug')
+@click.command(name="subproject-delete")
+@click.argument("project-slug")
+@click.argument("subproject-slug")
@click.pass_context
def subproject_delete(ctx, project_slug, subproject_slug):
"""Delete a project-subproject relationship."""
r = readthedocs.ReadTheDocs()
data = r.subproject_delete(project_slug, subproject_slug)
if data:
- log.info("Successfully removed the {} {} relationship"
- .format(project_slug, subproject_slug))
+ log.info("Successfully removed the {} {} relationship".format(project_slug, subproject_slug))
else:
log.error("Request failed. Is there a subproject relationship?")
pass
-@click.command(name='verify')
-@click.argument('yamlfile')
-@click.argument('schemafile')
+@click.command(name="verify")
+@click.argument("yamlfile")
+@click.argument("schemafile")
@click.pass_context
def verify_schema(ctx, yamlfile, schemafile):
"""Verify YAML Schema.
##############################################################################
"""Script to GPG or Sigul sign files."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
import subprocess
pass
-@click.command(name='dir')
-@click.argument('directory')
-@click.option(
- '-m', '--mode', type=str, default='parallel',
- help='Signing mode serial|parallel')
+@click.command(name="dir")
+@click.argument("directory")
+@click.option("-m", "--mode", type=str, default="parallel", help="Signing mode serial|parallel")
@click.pass_context
def directory(ctx, directory, mode):
"""GPG signs all of the files in a directory."""
- status = subprocess.call(['sign', 'dir', directory, mode])
+ status = subprocess.call(["sign", "dir", directory, mode])
sys.exit(status)
-@click.command(name='git-tag')
-@click.argument('tag')
+@click.command(name="git-tag")
+@click.argument("tag")
@click.pass_context
def git_tag(ctx, tag):
"""Sigul sign an annotated git tag."""
- status = subprocess.call(['sign', 'git-tag', tag])
+ status = subprocess.call(["sign", "git-tag", tag])
sys.exit(status)
-@click.command(name='container')
-@click.argument('manifest')
-@click.argument('tag')
+@click.command(name="container")
+@click.argument("manifest")
+@click.argument("tag")
@click.pass_context
def container(ctx, manifest, tag):
"""Sigul sign a Docker container."""
- status = subprocess.call(['sign', 'container', manifest, tag])
+ status = subprocess.call(["sign", "container", manifest, tag])
sys.exit(status)
-@click.command(name='nexus')
-@click.argument('nexus-repo-url')
+@click.command(name="nexus")
+@click.argument("nexus-repo-url")
@click.option(
- '-d', '--sign-dir', type=str,
+ "-d",
+ "--sign-dir",
+ type=str,
default=None,
- help='Local directory to clone repository. (default /tmp/gpg-signatures.*)')
-@click.option(
- '-m', '--mode', type=str, default='parallel',
- help='Signing mode serial|parallel')
-@click.option(
- '-w', '--sign-with', type=str, default='gpg',
- help='Sign artifacts with GPG or Sigul. (default gpg)')
+ help="Local directory to clone repository. (default /tmp/gpg-signatures.*)",
+)
+@click.option("-m", "--mode", type=str, default="parallel", help="Signing mode serial|parallel")
+@click.option("-w", "--sign-with", type=str, default="gpg", help="Sign artifacts with GPG or Sigul. (default gpg)")
@click.pass_context
def nexus(ctx, sign_dir, sign_with, nexus_repo_url, mode):
"""Fetch and GPG or Sigul sign a Nexus repo."""
if not sign_dir:
- sign_dir = tempfile.mkdtemp(prefix='gpg-signatures.')
- status = subprocess.call(['sign', 'nexus', '-d', sign_dir, '-m', mode, '-w', sign_with, nexus_repo_url])
+ sign_dir = tempfile.mkdtemp(prefix="gpg-signatures.")
+ status = subprocess.call(["sign", "nexus", "-d", sign_dir, "-m", mode, "-w", sign_with, nexus_repo_url])
sys.exit(status)
-@click.command(name='sigul')
-@click.argument('directory')
-@click.option(
- '-m', '--mode', type=str, default='parallel',
- help='Signing mode serial|parallel')
+@click.command(name="sigul")
+@click.argument("directory")
+@click.option("-m", "--mode", type=str, default="parallel", help="Signing mode serial|parallel")
@click.pass_context
def sigul(ctx, directory, mode):
"""Sigul signs all of the files in a directory."""
- status = subprocess.call(['sign', 'sigul', directory, mode])
+ status = subprocess.call(["sign", "sigul", directory, mode])
sys.exit(status)
-@click.command(name='deploy-nexus')
-@click.argument('nexus-url', envvar='NEXUS_URL')
-@click.argument('nexus-repo', envvar='NEXUS_REPO')
-@click.argument('staging-profile-id', envvar='STAGING_PROFILE_ID')
+@click.command(name="deploy-nexus")
+@click.argument("nexus-url", envvar="NEXUS_URL")
+@click.argument("nexus-repo", envvar="NEXUS_REPO")
+@click.argument("staging-profile-id", envvar="STAGING_PROFILE_ID")
@click.option(
- '-d', '--sign-dir', type=str,
+ "-d",
+ "--sign-dir",
+ type=str,
default=None,
- help='Local directory to clone repository. (default /tmp/gpg-signatures.*)')
-@click.option(
- '-m', '--mode', type=str, default='parallel',
- help='Signing mode serial|parallel')
-@click.option(
- '-r', '--root-domain', type=str, default='org',
- help='Root download path of staging repo. (default org)')
-@click.option(
- '-w', '--sign-with', type=str, default='gpg',
- help='Sign artifacts with GPG or Sigul. (default gpg)')
+ help="Local directory to clone repository. (default /tmp/gpg-signatures.*)",
+)
+@click.option("-m", "--mode", type=str, default="parallel", help="Signing mode serial|parallel")
+@click.option("-r", "--root-domain", type=str, default="org", help="Root download path of staging repo. (default org)")
+@click.option("-w", "--sign-with", type=str, default="gpg", help="Sign artifacts with GPG or Sigul. (default gpg)")
@click.pass_context
def deploy_nexus(ctx, nexus_url, nexus_repo, staging_profile_id, sign_dir, sign_with, root_domain, mode):
"""Sign artifacts from a Nexus repo then upload to a staging repo.
# as a workaround we have to give it at least 1 directory deep. Since most
# LF projects are in the 'org' domain, the default is org, but it can be
# overridden with the -r option.
- nexus_url = nexus_url.rstrip('/')
+ nexus_url = nexus_url.rstrip("/")
nexus_repo_url = "{}/content/repositories/{}/{}".format(nexus_url, nexus_repo, root_domain)
if not sign_dir:
- sign_dir = tempfile.mkdtemp(prefix='gpg-signatures.')
+ sign_dir = tempfile.mkdtemp(prefix="gpg-signatures.")
- status = subprocess.call(['sign', 'nexus', '-d', sign_dir, '-m', mode, '-w', sign_with, nexus_repo_url])
+ status = subprocess.call(["sign", "nexus", "-d", sign_dir, "-m", mode, "-w", sign_with, nexus_repo_url])
if status:
sys.exit(status)
- status = subprocess.call(['deploy', 'nexus-stage', nexus_url, staging_profile_id, sign_dir])
+ status = subprocess.call(["deploy", "nexus-stage", nexus_url, staging_profile_id, sign_dir])
sys.exit(status)
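# For illustration, with nexus_url="https://nexus.example.org",
# nexus_repo="test-release", and the default root_domain="org" (all values
# made up), the constructed staging source URL is:
#   https://nexus.example.org/content/repositories/test-release/org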
##############################################################################
"""Version bump script for Maven based projects."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
import logging
@click.command()
-@click.argument('release-tag')
+@click.argument("release-tag")
@click.pass_context
def bump(ctx, release_tag):
"""Version bump pom files in a Maven project by x.(y+1).z or x.y.(z+1).
3. Change x.y.z-SNAPSHOT versions to x.(y+1).0-SNAPSHOT
4. Change x.y.z-RELEASE_TAG versions to x.y.(z+1)-SNAPSHOT and
"""
- status = subprocess.call(['version', 'bump', release_tag])
+ status = subprocess.call(["version", "bump", release_tag])
sys.exit(status)
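# Illustrative CLI usage (the release tag is a made-up example); this
# delegates to the bundled "version" script via subprocess:
#   lftools version bump v1.0.0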
@click.command()
-@click.argument('release-tag')
+@click.argument("release-tag")
@click.pass_context
def release(ctx, release_tag):
"""Version bump pom files in a Maven project from SNAPSHOT to RELEASE_TAG.
Searches poms for all instances of SNAPSHOT version and changes it to
RELEASE_TAG.
"""
- status = subprocess.call(['version', 'release', release_tag])
+ status = subprocess.call(["version", "release", release_tag])
sys.exit(status)
@click.command()
-@click.argument('release-tag')
-@click.argument('patch-dir')
-@click.option(
- '--project', default='OpenDaylight',
- help='Project name to use when tagging. (Default: OpenDaylight)')
+@click.argument("release-tag")
+@click.argument("patch-dir")
+@click.option("--project", default="OpenDaylight", help="Project name to use when tagging. (Default: OpenDaylight)")
@click.pass_context
def patch(ctx, release_tag, patch_dir, project):
"""Patch a project with git.bundles and then version bump.
if not os.path.isdir(patch_dir):
log.error("{} is not a valid directory.".format(patch_dir))
sys.exit(404)
- status = subprocess.call(['version', 'patch', release_tag, patch_dir, project])
+ status = subprocess.call(["version", "patch", release_tag, patch_dir, project])
sys.exit(status)
log = logging.getLogger(__name__)
-LFTOOLS_CONFIG_FILE = os.path.join(XDG_CONFIG_HOME, 'lftools', 'lftools.ini')
+LFTOOLS_CONFIG_FILE = os.path.join(XDG_CONFIG_HOME, "lftools", "lftools.ini")
def get_config():
if option:
try:
return config.get(section, option)
- except (configparser.NoOptionError,
- configparser.NoSectionError) as e:
+ except (configparser.NoOptionError, configparser.NoSectionError) as e:
raise e
else:
config = get_config()
config.set(section, option, value)
- with open(LFTOOLS_CONFIG_FILE, 'w') as configfile:
+ with open(LFTOOLS_CONFIG_FILE, "w") as configfile:
config.write(configfile)
os.chdir(dir)
compress_types = [
- '**/*.html',
- '**/*.log',
- '**/*.txt',
- '**/*.xml',
+ "**/*.html",
+ "**/*.log",
+ "**/*.txt",
+ "**/*.xml",
]
paths = []
for _type in compress_types:
paths.extend(glob.glob(search, recursive=True))
for _file in paths:
- with open(_file, 'rb') as src, gzip.open('{}.gz'.format(_file), 'wb') as dest:
+ with open(_file, "rb") as src, gzip.open("{}.gz".format(_file), "wb") as dest:
shutil.copyfileobj(src, dest)
os.remove(_file)
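# Illustrative effect: a matching file such as logs/console.txt is rewritten
# as logs/console.txt.gz and the uncompressed original is removed.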
def _format_url(url):
"""Ensure url starts with http and trim trailing '/'s."""
- start_pattern = re.compile('^(http|https)://')
+ start_pattern = re.compile("^(http|https)://")
if not start_pattern.match(url):
- url = 'http://{}'.format(url)
+ url = "http://{}".format(url)
- if url.endswith('/'):
- url = url.rstrip('/')
+ if url.endswith("/"):
+ url = url.rstrip("/")
return url
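# Expected behavior (illustrative hosts):
#   _format_url("nexus.example.org/") -> "http://nexus.example.org"
#   _format_url("https://nexus.example.org") -> "https://nexus.example.org"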
"""Execute a request post, return the resp."""
resp = {}
try:
- upload_file = open(file_to_upload, 'rb')
+ upload_file = open(file_to_upload, "rb")
except FileNotFoundError:
- raise FileNotFoundError(
- errno.ENOENT, os.strerror(errno.ENOENT), file_to_upload)
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), file_to_upload)
- files = {'file': upload_file}
+ files = {"file": upload_file}
try:
if parameters:
resp = requests.post(url, data=parameters, files=files)
elif resp.status_code == 404:
raise requests.HTTPError("Did not find repository.")
- if not str(resp.status_code).startswith('20'):
- raise requests.HTTPError("Failed to upload to Nexus with status code: {}.\n{}\n{}".format(
- resp.status_code, resp.text, file_to_upload))
+ if not str(resp.status_code).startswith("20"):
+ raise requests.HTTPError(
+ "Failed to upload to Nexus with status code: {}.\n{}\n{}".format(
+ resp.status_code, resp.text, file_to_upload
+ )
+ )
return resp
"""Execute a request put, return the resp."""
resp = {}
try:
- upload_file = open(file_to_upload, 'rb')
+ upload_file = open(file_to_upload, "rb")
except FileNotFoundError:
- raise FileNotFoundError(
- errno.ENOENT, os.strerror(errno.ENOENT), file_to_upload)
+ raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), file_to_upload)
- files = {'file': upload_file}
+ files = {"file": upload_file}
try:
if parameters:
resp = requests.put(url, data=parameters, files=files)
if resp.status_code == 404:
raise requests.HTTPError("Did not find repository.")
- if not str(resp.status_code).startswith('20'):
- raise requests.HTTPError("Failed to upload to Nexus with status code: {}.\n{}\n{}".format(
- resp.status_code, resp.text, file_to_upload))
+ if not str(resp.status_code).startswith("20"):
+ raise requests.HTTPError(
+ "Failed to upload to Nexus with status code: {}.\n{}\n{}".format(
+ resp.status_code, resp.text, file_to_upload
+ )
+ )
def _get_node_from_xml(xml_data, tag_name):
"""Extract tag data from xml data."""
- log.debug('xml={}'.format(xml_data))
+ log.debug("xml={}".format(xml_data))
try:
dom1 = parseString(xml_data)
duplicated_list = []
for i in range(len(no_dups_lst)):
- if (lst.count(no_dups_lst[i]) > 1):
+ if lst.count(no_dups_lst[i]) > 1:
duplicated_list.append(no_dups_lst[i])
log.debug("duplicates : {}".format(duplicated_list))
:arg str pattern: Space-separated list of Unix style glob patterns.
(default: None)
"""
- archives_dir = os.path.join(workspace, 'archives')
+ archives_dir = os.path.join(workspace, "archives")
dest_dir = os.getcwd()
- log.debug('Copying files from {} with pattern \'{}\' to {}.'.format(
- workspace, pattern, dest_dir))
- log.debug('archives_dir = {}'.format(archives_dir))
+ log.debug("Copying files from {} with pattern '{}' to {}.".format(workspace, pattern, dest_dir))
+ log.debug("archives_dir = {}".format(archives_dir))
if os.path.exists(archives_dir):
if os.path.isfile(archives_dir):
- log.error('Archives {} is a file, not a directory.'.format(archives_dir))
- raise OSError(errno.ENOENT, 'Not a directory', archives_dir)
+ log.error("Archives {} is a file, not a directory.".format(archives_dir))
+ raise OSError(errno.ENOENT, "Not a directory", archives_dir)
else:
- log.debug('Archives dir {} does exist.'.format(archives_dir))
+ log.debug("Archives dir {} does exist.".format(archives_dir))
for file_or_dir in os.listdir(archives_dir):
f = os.path.join(archives_dir, file_or_dir)
try:
- log.debug('Moving {}'.format(f))
+ log.debug("Moving {}".format(f))
shutil.move(f, dest_dir)
except shutil.Error as e:
log.error(e)
- raise OSError(errno.EPERM, 'Could not move to', archives_dir)
+ raise OSError(errno.EPERM, "Could not move to", archives_dir)
else:
- log.error('Archives dir {} does not exist.'.format(archives_dir))
- raise OSError(errno.ENOENT, 'Missing directory', archives_dir)
+ log.error("Archives dir {} does not exist.".format(archives_dir))
+ raise OSError(errno.ENOENT, "Missing directory", archives_dir)
if pattern is None:
return
paths = []
for p in no_dups_pattern:
- if p == '': # Skip empty patterns as they are invalid
+ if p == "": # Skip empty patterns as they are invalid
continue
search = os.path.join(workspace, p)
paths.extend(glob.glob(search, recursive=True))
- log.debug('Files found: {}'.format(paths))
+ log.debug("Files found: {}".format(paths))
no_dups_paths = _remove_duplicates_and_sort(paths)
for src in no_dups_paths:
if len(os.path.basename(src)) > 255:
- log.warn('Filename {} is over 255 characters. Skipping...'.format(
- os.path.basename(src)))
+ log.warn("Filename {} is over 255 characters. Skipping...".format(os.path.basename(src)))
- dest = os.path.join(dest_dir, src[len(workspace)+1:])
- log.debug('{} -> {}'.format(src, dest))
+ dest = os.path.join(dest_dir, src[len(workspace) + 1 :])
+ log.debug("{} -> {}".format(src, dest))
if os.path.isfile(src):
try:
os.makedirs(os.path.dirname(dest))
shutil.move(src, dest)
else:
- log.info('Not copying directories: {}.'.format(src))
+ log.info("Not copying directories: {}.".format(src))
def deploy_archives(nexus_url, nexus_path, workspace, pattern=None):
"""
nexus_url = _format_url(nexus_url)
previous_dir = os.getcwd()
- work_dir = tempfile.mkdtemp(prefix='lftools-da.')
+ work_dir = tempfile.mkdtemp(prefix="lftools-da.")
os.chdir(work_dir)
- log.debug('workspace: {}, work_dir: {}'.format(workspace, work_dir))
+ log.debug("workspace: {}, work_dir: {}".format(workspace, work_dir))
copy_archives(workspace, pattern)
_compress_text(work_dir)
- archives_zip = shutil.make_archive(
- '{}/archives'.format(workspace), 'zip')
- log.debug('archives zip: {}'.format(archives_zip))
- deploy_nexus_zip(nexus_url, 'logs', nexus_path, archives_zip)
+ archives_zip = shutil.make_archive("{}/archives".format(workspace), "zip")
+ log.debug("archives zip: {}".format(archives_zip))
+ deploy_nexus_zip(nexus_url, "logs", nexus_path, archives_zip)
os.chdir(previous_dir)
shutil.rmtree(work_dir)
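# Usage sketch (all argument values are illustrative):
#   deploy_archives(
#       "https://nexus.example.org", "production/my-job/42",
#       "/w/workspace/my-job", pattern="**/*.log",
#   )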
"""
nexus_url = _format_url(nexus_url)
previous_dir = os.getcwd()
- work_dir = tempfile.mkdtemp(prefix='lftools-dl.')
+ work_dir = tempfile.mkdtemp(prefix="lftools-dl.")
os.chdir(work_dir)
- log.debug('work_dir: {}'.format(work_dir))
+ log.debug("work_dir: {}".format(work_dir))
- build_details = open('_build-details.log', 'w+')
- build_details.write('build-url: {}'.format(build_url))
+ build_details = open("_build-details.log", "w+")
+ build_details.write("build-url: {}".format(build_url))
- with open('_sys-info.log', 'w+') as sysinfo_log:
+ with open("_sys-info.log", "w+") as sysinfo_log:
sys_cmds = []
- log.debug('Platform: {}'.format(sys.platform))
+ log.debug("Platform: {}".format(sys.platform))
if sys.platform == "linux" or sys.platform == "linux2":
sys_cmds = [
- ['uname', '-a'],
- ['lscpu'],
- ['nproc'],
- ['df', '-h'],
- ['free', '-m'],
- ['ip', 'addr'],
- ['sar', '-b', '-r', '-n', 'DEV'],
- ['sar', '-P', 'ALL'],
+ ["uname", "-a"],
+ ["lscpu"],
+ ["nproc"],
+ ["df", "-h"],
+ ["free", "-m"],
+ ["ip", "addr"],
+ ["sar", "-b", "-r", "-n", "DEV"],
+ ["sar", "-P", "ALL"],
]
for c in sys_cmds:
try:
- output = subprocess.check_output(c).decode('utf-8')
+ output = subprocess.check_output(c).decode("utf-8")
except OSError: # TODO: Switch to FileNotFoundError when Python < 3.5 support is dropped.
- log.debug('Command not found: {}'.format(c))
+ log.debug("Command not found: {}".format(c))
continue
- output = '---> {}:\n{}\n'.format(' '.join(c), output)
+ output = "---> {}:\n{}\n".format(" ".join(c), output)
sysinfo_log.write(output)
log.info(output)
MAGIC_STRING = "-----END_OF_BUILD-----"
log.info(MAGIC_STRING)
- resp = requests.get('{}/consoleText'.format(_format_url(build_url)))
- with io.open('console.log', 'w+', encoding='utf-8') as f:
- f.write(six.text_type(resp.content.decode('utf-8').split(MAGIC_STRING)[0]))
+ resp = requests.get("{}/consoleText".format(_format_url(build_url)))
+ with io.open("console.log", "w+", encoding="utf-8") as f:
+ f.write(six.text_type(resp.content.decode("utf-8").split(MAGIC_STRING)[0]))
- resp = requests.get('{}/timestamps?time=HH:mm:ss&appendLog'.format(_format_url(build_url)))
- with io.open('console-timestamp.log', 'w+', encoding='utf-8') as f:
- f.write(six.text_type(resp.content.decode('utf-8').split(MAGIC_STRING)[0]))
+ resp = requests.get("{}/timestamps?time=HH:mm:ss&appendLog".format(_format_url(build_url)))
+ with io.open("console-timestamp.log", "w+", encoding="utf-8") as f:
+ f.write(six.text_type(resp.content.decode("utf-8").split(MAGIC_STRING)[0]))
_compress_text(work_dir)
- console_zip = tempfile.NamedTemporaryFile(prefix='lftools-dl', delete=True)
- log.debug('console-zip: {}'.format(console_zip.name))
- shutil.make_archive(console_zip.name, 'zip', work_dir)
- deploy_nexus_zip(nexus_url, 'logs', nexus_path, '{}.zip'.format(console_zip.name))
+ console_zip = tempfile.NamedTemporaryFile(prefix="lftools-dl", delete=True)
+ log.debug("console-zip: {}".format(console_zip.name))
+ shutil.make_archive(console_zip.name, "zip", work_dir)
+ deploy_nexus_zip(nexus_url, "logs", nexus_path, "{}.zip".format(console_zip.name))
console_zip.close()
os.chdir(previous_dir)
tst_path \
tests/fixtures/deploy/zip-test-files/test.zip
"""
- url = '{}/service/local/repositories/{}/content-compressed/{}'.format(
- _format_url(nexus_url),
- nexus_repo,
- nexus_path)
- log.debug('Uploading {} to {}'.format(zip_file, url))
+ url = "{}/service/local/repositories/{}/content-compressed/{}".format(
+ _format_url(nexus_url), nexus_repo, nexus_path
+ )
+ log.debug("Uploading {} to {}".format(zip_file, url))
try:
resp = _request_post_file(url, zip_file)
for f in files:
log.info(" {}".format(f))
raise requests.HTTPError(e)
- log.debug('{}: {}'.format(resp.status_code, resp.text))
+ log.debug("{}: {}".format(resp.status_code, resp.text))
def nexus_stage_repo_create(nexus_url, staging_profile_id):
Sample:
lftools deploy nexus-stage-repo-create 192.168.1.26:8081/nexus/ 93fb68073c18
"""
- nexus_url = '{0}/service/local/staging/profiles/{1}/start'.format(
- _format_url(nexus_url),
- staging_profile_id)
+ nexus_url = "{0}/service/local/staging/profiles/{1}/start".format(_format_url(nexus_url), staging_profile_id)
log.debug("Nexus URL = {}".format(nexus_url))
</promoteRequest>
"""
- headers = {'Content-Type': 'application/xml'}
+ headers = {"Content-Type": "application/xml"}
resp = _request_post(nexus_url, xml, headers)
log.debug("resp.status_code = {}".format(resp.status_code))
log.debug("resp.text = {}".format(resp.text))
- if re.search('nexus-error', resp.text):
- error_msg = _get_node_from_xml(resp.text, 'msg')
- if re.search('.*profile with id:.*does not exist.', error_msg):
+ if re.search("nexus-error", resp.text):
+ error_msg = _get_node_from_xml(resp.text, "msg")
+ if re.search(".*profile with id:.*does not exist.", error_msg):
_log_error_and_exit("Staging profile id {} not found.".format(staging_profile_id))
_log_error_and_exit(error_msg)
if not resp.status_code == 201:
_log_error_and_exit("Failed with status code {}".format(resp.status_code), resp.text)
- staging_repo_id = _get_node_from_xml(resp.text, 'stagedRepositoryId')
+ staging_repo_id = _get_node_from_xml(resp.text, "stagedRepositoryId")
log.debug("staging_repo_id = {}".format(staging_repo_id))
return staging_repo_id
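# The request body POSTed above is a promoteRequest XML document along these
# lines (sketch; the exact description text may differ):
#   <promoteRequest>
#     <data>
#       <description>Create staging repository.</description>
#     </data>
#   </promoteRequest>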
Sample:
lftools deploy nexus-stage-repo-close 192.168.1.26:8081/nexus/ 93fb68073c18 test1-1031
"""
- nexus_url = '{0}/service/local/staging/profiles/{1}/finish'.format(
- _format_url(nexus_url),
- staging_profile_id)
+ nexus_url = "{0}/service/local/staging/profiles/{1}/finish".format(_format_url(nexus_url), staging_profile_id)
log.debug("Nexus URL = {}".format(nexus_url))
log.debug("staging_repo_id = {}".format(staging_repo_id))
<description>Close staging repository.</description>
</data>
</promoteRequest>
- """.format(staging_repo_id)
+ """.format(
+ staging_repo_id
+ )
- headers = {'Content-Type': 'application/xml'}
+ headers = {"Content-Type": "application/xml"}
resp = _request_post(nexus_url, xml, headers)
log.debug("resp.status_code = {}".format(resp.status_code))
log.debug("resp.text = {}".format(resp.text))
- if re.search('nexus-error', resp.text):
- error_msg = _get_node_from_xml(resp.text, 'msg')
+ if re.search("nexus-error", resp.text):
+ error_msg = _get_node_from_xml(resp.text, "msg")
else:
error_msg = resp.text
if resp.status_code == 404:
_log_error_and_exit("Did not find nexus site: {}".format(nexus_url))
- if re.search('invalid state: closed', error_msg):
+ if re.search("invalid state: closed", error_msg):
_log_error_and_exit("Staging repository is already closed.")
- if re.search('Missing staging repository:', error_msg):
+ if re.search("Missing staging repository:", error_msg):
_log_error_and_exit("Staging repository do not exist.")
if not resp.status_code == 201:
_log_error_and_exit("Failed with status code {}".format(resp.status_code), resp.text)
-def upload_maven_file_to_nexus(nexus_url, nexus_repo_id,
- group_id, artifact_id, version,
- packaging, file, classifier=None):
+def upload_maven_file_to_nexus(
+ nexus_url, nexus_repo_id, group_id, artifact_id, version, packaging, file, classifier=None
+):
"""Upload file to Nexus as a Maven artifact.
This function will upload an artifact to Nexus while providing all of
http://192.168.1.26:8081/nexus/content/repositories/releases \
tests/fixtures/deploy/zip-test-files
"""
- url = '{}/service/local/artifact/maven/content'.format(_format_url(nexus_url))
+ url = "{}/service/local/artifact/maven/content".format(_format_url(nexus_url))
- log.info('Uploading URL: {}'.format(url))
+ log.info("Uploading URL: {}".format(url))
params = {}
- params.update({'r': (None, '{}'.format(nexus_repo_id))})
- params.update({'g': (None, '{}'.format(group_id))})
- params.update({'a': (None, '{}'.format(artifact_id))})
- params.update({'v': (None, '{}'.format(version))})
- params.update({'p': (None, '{}'.format(packaging))})
+ params.update({"r": (None, "{}".format(nexus_repo_id))})
+ params.update({"g": (None, "{}".format(group_id))})
+ params.update({"a": (None, "{}".format(artifact_id))})
+ params.update({"v": (None, "{}".format(version))})
+ params.update({"p": (None, "{}".format(packaging))})
if classifier:
- params.update({'c': (None, '{}'.format(classifier))})
+ params.update({"c": (None, "{}".format(classifier))})
- log.debug('Maven Parameters: {}'.format(params))
+ log.debug("Maven Parameters: {}".format(params))
resp = _request_post_file(url, file, params)
- if re.search('nexus-error', resp.text):
- error_msg = _get_node_from_xml(resp.text, 'msg')
+ if re.search("nexus-error", resp.text):
+ error_msg = _get_node_from_xml(resp.text, "msg")
raise requests.HTTPError("Nexus Error: {}".format(error_msg))
http://192.168.1.26:8081/nexus/content/repositories/releases \
tests/fixtures/deploy/zip-test-files
"""
+
def _get_filesize(file):
bytesize = os.path.getsize(file)
if bytesize == 0:
def _deploy_nexus_upload(file):
# Fix file path, and call _request_put_file.
- nexus_url_with_file = '{}/{}'.format(_format_url(nexus_repo_url), file)
+ nexus_url_with_file = "{}/{}".format(_format_url(nexus_repo_url), file)
log.info("Attempting to upload {} ({})".format(file, _get_filesize(file)))
if _request_put_file(nexus_url_with_file, file):
return True
file_list = []
previous_dir = os.getcwd()
os.chdir(deploy_dir)
- files = glob.glob('**/*', recursive=True)
+ files = glob.glob("**/*", recursive=True)
for file in files:
if os.path.isfile(file):
base_name = os.path.basename(file)
# Skip blacklisted files
- if (base_name == "_remote.repositories" or
- base_name == "resolver-status.properties"):
+ if base_name == "_remote.repositories" or base_name == "resolver-status.properties":
continue
if not snapshot:
# default threads to CPU count / 2 so we're a nice neighbor to other builds
log.info("#######################################################")
- log.info('Deploying directory {} to {}'.format(deploy_dir, nexus_repo_url))
+ log.info("Deploying directory {} to {}".format(deploy_dir, nexus_repo_url))
workers = int(cpu_count() / 2)
with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
if data == data:
pass
except Exception as e:
- log.error('Uploading {}: {}'.format(filename, e))
+ log.error("Uploading {}: {}".format(filename, e))
# wait until all threads complete (successfully or not)
# then log the results of the upload threads
staging_repo_id = nexus_stage_repo_create(nexus_url, staging_profile_id)
log.info("Staging repository {} created.".format(staging_repo_id))
- deploy_nexus_url = '{0}/service/local/staging/deployByRepositoryId/{1}'.format(
- _format_url(nexus_url),
- staging_repo_id)
+ deploy_nexus_url = "{0}/service/local/staging/deployByRepositoryId/{1}".format(
+ _format_url(nexus_url), staging_repo_id
+ )
log.debug("Nexus Staging URL: {}".format(_format_url(deploy_nexus_url)))
deploy_nexus(deploy_nexus_url, deploy_dir)
team_members.append(user.login)
log.info(" - '{}'".format(user.login))
- return(team_members)
+ return team_members
def prvotes(organization, repo, pr):
for approve in approvals:
if approve.state == ("APPROVED"):
approval_list.append(approve.user.login)
- return(approval_list)
+ return approval_list
def helper_user_github(ctx, organization, user, team, delete, admin):
my_teams = [team]
this_team = [team for team in teams() if team.name in my_teams]
for t in this_team:
- team_id = (t.id)
+ team_id = t.id
team = org.get_team(team_id)
teams = []
teams.append(team)
##############################################################################
"""Jenkins."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
import logging
import os
def jjb_ini():
"""Return jenkins_jobs.ini file location if it exists, None otherwise."""
- global_conf = '/etc/jenkins_jobs/jenkins_jobs.ini'
- user_conf = os.path.join(
- os.path.expanduser('~'),
- '.config',
- 'jenkins_jobs',
- 'jenkins_jobs.ini')
- local_conf = os.path.join(
- os.getcwd(),
- 'jenkins_jobs.ini')
+ global_conf = "/etc/jenkins_jobs/jenkins_jobs.ini"
+ user_conf = os.path.join(os.path.expanduser("~"), ".config", "jenkins_jobs", "jenkins_jobs.ini")
+ local_conf = os.path.join(os.getcwd(), "jenkins_jobs.ini")
conf = None
if os.path.isfile(local_conf):
JJB_INI = jjb_ini()
-class Jenkins():
+class Jenkins:
"""lftools Jenkins object."""
def __init__(self, server, user=None, password=None, config_file=None):
if not self.config_file:
self.config_file = JJB_INI
- if '://' not in server:
+ if "://" not in server:
if self.config_file:
- log.debug('Using config from {}'.format(self.config_file))
+ log.debug("Using config from {}".format(self.config_file))
config = configparser.SafeConfigParser()
config.read(self.config_file)
if config.has_section(server):
- user = config.get(server, 'user')
- password = config.get(server, 'password')
- server = config.get(server, 'url')
+ user = config.get(server, "user")
+ password = config.get(server, "password")
+ server = config.get(server, "url")
else:
- log.error('[{}] section not found in {}'
- .format(server, self.config_file))
+ log.error("[{}] section not found in {}".format(server, self.config_file))
else:
- log.debug('jenkins_jobs.ini not found in any of the default paths.')
- server = 'https://localhost:8080'
+ log.debug("jenkins_jobs.ini not found in any of the default paths.")
+ server = "https://localhost:8080"
- self.server = jenkins.Jenkins(
- server,
- username=user,
- password=password)
+ self.server = jenkins.Jenkins(server, username=user, password=password)
self.url = server
##############################################################################
"""Jenkins token functions."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
import logging
api token.
"""
if change:
- log.debug('Resetting Jenkins API token on {}'.format(url))
+ log.debug("Resetting Jenkins API token on {}".format(url))
else:
- log.debug('Fetching Jenkins API token from {}'.format(url))
+ log.debug("Fetching Jenkins API token from {}".format(url))
- server = jenkins.Jenkins(
- url,
- username=username,
- password=password)
+ server = jenkins.Jenkins(url, username=username, password=password)
get_token = """
import hudson.model.*
ApiTokenProperty t = u.getProperty(ApiTokenProperty.class)
def token = t.tokenStore.generateNewToken("{}")
println token.plainValue
-""".format(username, name)
+""".format(
+ username, name
+ )
token = server.run_script(get_token)
return token
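# Usage sketch; the enclosing function's full signature is not shown in this
# hunk, so the call below is illustrative only (assumed argument order):
#   token = get_token("my-token", "https://jenkins.example.org",
#                     change=True, username="admin", password="secret")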
def check_response_code(response):
"""Response Code Helper function."""
if response.status_code != 200:
- raise requests.HTTPError("Authorization failed with the following "
- "error:\n{}: {}".format(response.status_code,
- response.text))
+ raise requests.HTTPError(
+ "Authorization failed with the following " "error:\n{}: {}".format(response.status_code, response.text)
+ )
def helper_check_group_exists(group):
"""List members of a group."""
access_token, url = oauth_helper()
url = PARSE(url, group)
- headers = {'Authorization': 'Bearer ' + access_token}
+ headers = {"Authorization": "Bearer " + access_token}
response = requests.get(url, headers=headers)
status_code = response.status_code
return status_code
else:
access_token, url = oauth_helper()
url = PARSE(url, group)
- headers = {'Authorization': 'Bearer ' + access_token}
+ headers = {"Authorization": "Bearer " + access_token}
response = requests.get(url, headers=headers)
try:
check_response_code(response)
except requests.HTTPError as e:
log.error(e)
exit(1)
- result = (response.json())
+ result = response.json()
members = result["members"]
log.debug(json.dumps(members, indent=4, sort_keys=True))
return members
"""Add and remove users from groups."""
access_token, url = oauth_helper()
url = PARSE(url, group)
- headers = {'Authorization': 'Bearer ' + access_token}
+ headers = {"Authorization": "Bearer " + access_token}
data = {"username": user}
if delete:
- log.info('Deleting %s from %s' % (user, group))
+ log.info("Deleting %s from %s" % (user, group))
response = requests.delete(url, json=data, headers=headers)
else:
- log.info('Adding %s to %s' % (user, group))
+ log.info("Adding %s to %s" % (user, group))
response = requests.put(url, json=data, headers=headers)
try:
check_response_code(response)
except requests.HTTPError as e:
log.error(e)
exit(1)
- result = (response.json())
+ result = response.json()
log.debug(json.dumps(result, indent=4, sort_keys=True))
def helper_invite(email, group):
"""Email invitation to join group."""
access_token, url = oauth_helper()
- prejoin = group + '/invite'
+ prejoin = group + "/invite"
url = PARSE(url, prejoin)
- headers = {'Authorization': 'Bearer ' + access_token}
+ headers = {"Authorization": "Bearer " + access_token}
data = {"mail": email}
- log.info('Validating email %s' % email)
+ log.info("Validating email %s" % email)
if validate_email(email):
- log.info('Inviting %s to join %s' % (email, group))
+ log.info("Inviting %s to join %s" % (email, group))
response = requests.post(url, json=data, headers=headers)
try:
check_response_code(response)
except requests.HTTPError as e:
log.error(e)
exit(1)
- result = (response.json())
+ result = response.json()
log.debug(json.dumps(result, indent=4, sort_keys=True))
else:
- log.error("Email '%s' is not valid, not inviting to %s" %
- (email, group))
+ log.error("Email '%s' is not valid, not inviting to %s" % (email, group))
def helper_create_group(group):
log.error("Group %s already exists. Exiting..." % group)
else:
access_token, url = oauth_helper()
- url = '{}/'.format(url)
- headers = {'Authorization': 'Bearer ' + access_token}
+ url = "{}/".format(url)
+ headers = {"Authorization": "Bearer " + access_token}
data = {"title": group, "type": "group"}
log.debug(data)
- log.info('Creating group %s' % group)
+ log.info("Creating group %s" % group)
response = requests.post(url, json=data, headers=headers)
try:
check_response_code(response)
except requests.HTTPError as e:
log.error(e)
exit(1)
- result = (response.json())
+ result = response.json()
log.debug(json.dumps(result, indent=4, sort_keys=True))
info_data = yaml.safe_load(file)
except yaml.YAMLError as exc:
print(exc)
- id = 'id'
+ id = "id"
if githuborg:
- id = 'github_id'
- ldap_data = helper_list(ctx=False, organization=githuborg, repos=False, audit=False,
- full=False, teams=False, repofeatures=False, team=group)
+ id = "github_id"
+ ldap_data = helper_list(
+ ctx=False,
+ organization=githuborg,
+ repos=False,
+ audit=False,
+ full=False,
+ teams=False,
+ repofeatures=False,
+ team=group,
+ )
else:
ldap_data = helper_search_members(group)
- committer_info = info_data['committers']
+ committer_info = info_data["committers"]
info_committers = []
for count, item in enumerate(committer_info):
else:
for count, item in enumerate(ldap_data):
- committer = ldap_data[count]['username']
+ committer = ldap_data[count]["username"]
ldap_committers.append(committer)
all_users = ldap_committers + info_committers
if noop is False:
log.info("Removing user %s from group %s" % (user, group))
if githuborg:
- helper_user_github(ctx=False, organization=githuborg, user=user,
- team=group, delete=True, admin=False)
+ helper_user_github(
+ ctx=False, organization=githuborg, user=user, team=group, delete=True, admin=False
+ )
else:
helper_user(user, group, "--delete")
if noop is False:
log.info("Adding user %s to group %s" % (user, group))
if githuborg:
- helper_user_github(ctx=False, organization=githuborg, user=user,
- team=group, delete=False, admin=False)
+ helper_user_github(
+ ctx=False, organization=githuborg, user=user, team=group, delete=False, admin=False
+ )
else:
helper_user(user, group, "")
##############################################################################
"""Scans code for a valid license header."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
import logging
Note: This function only supports '#' comments for license headers.
"""
- text = ''
- with open(_file, 'r') as data:
+ text = ""
+ with open(_file, "r") as data:
lines = data.readlines()
for line in lines:
- result = re.search(r'\s*[#]', line)
+ result = re.search(r"\s*[#]", line)
if not result:
break
- string = re.sub(r'^\s*#+', '', line).strip()
- if (bool(re.match('Copyright', string, re.I)) # Ignore the Copyright line
- or bool(re.match('^#!', line, re.I))): # Ignore #! shebang lines
+ string = re.sub(r"^\s*#+", "", line).strip()
+ if bool(re.match("Copyright", string, re.I)) or bool( # Ignore the Copyright line
+ re.match("^#!", line, re.I)
+ ): # Ignore #! shebang lines
continue
- text += ' {}'.format(string)
+ text += " {}".format(string)
# Strip unnecessary spacing
- text = re.sub('\s+', ' ', text).strip()
+ text = re.sub("\s+", " ", text).strip()
return text
code_header = get_header_text(code_file)
if not license_header in code_header:
- log.error('{} is missing or has incorrect license header.'.format(code_file))
+ log.error("{} is missing or has incorrect license header.".format(code_file))
return 1
return 0
if missing_license:
sys.exit(1)
- log.info('Scan completed did not detect any files missing license headers.')
+ log.info("Scan completed did not detect any files missing license headers.")
"""Library for working with Sonatype Nexus REST API."""
-__author__ = 'Andrew Grimberg'
-__license__ = 'Apache 2.0'
-__copyright__ = 'Copyright 2017 Andrew Grimberg'
+__author__ = "Andrew Grimberg"
+__license__ = "Apache 2.0"
+__copyright__ = "Copyright 2017 Andrew Grimberg"
import json
import logging
self.auth = None
self.headers = {
- 'Accept': 'application/json',
- 'Content-Type': 'application/json',
+ "Accept": "application/json",
+ "Content-Type": "application/json",
}
def set_full_baseurl(self):
"service/local/repo_targets",
"service/siesta/rest/beta/read-only",
"service/rest/beta/read-only",
- "service/rest/v1/read-only"
+ "service/rest/v1/read-only",
]
for endpoint in endpoints:
url = os.path.join(self.baseurl, endpoint)
def get_target(self, name):
"""Get the ID of a given target name."""
- url = os.path.join(self.baseurl, 'repo_targets')
+ url = os.path.join(self.baseurl, "repo_targets")
targets = requests.get(url, auth=self.auth, headers=self.headers).json()
- for priv in targets['data']:
- if priv['name'] == name:
- return priv['id']
+ for priv in targets["data"]:
+ if priv["name"] == name:
+ return priv["id"]
raise LookupError("No target found named '{}'".format(name))
def create_target(self, name, patterns):
"""Create a target with the given patterns."""
- url = os.path.join(self.baseurl, 'repo_targets')
+ url = os.path.join(self.baseurl, "repo_targets")
- target = {
- 'data': {
- 'contentClass': 'any',
- 'patterns': patterns,
- 'name': name,
- }
- }
+ target = {"data": {"contentClass": "any", "patterns": patterns, "name": name,}}
- json_data = json.dumps(target).encode(encoding='latin-1')
+ json_data = json.dumps(target).encode(encoding="latin-1")
r = requests.post(url, auth=self.auth, headers=self.headers, data=json_data)
if r.status_code != requests.codes.created:
raise Exception("Target not created for '{}', code '{}'".format(name, r.status_code))
- return r.json()['data']['id']
+ return r.json()["data"]["id"]
def get_priv(self, name, priv):
"""Get the ID for the privilege with the given name and privlege type."""
def get_priv_by_name(self, name):
"""Get the ID for the privilege with the given name."""
- url = os.path.join(self.baseurl, 'privileges')
+ url = os.path.join(self.baseurl, "privileges")
privileges = requests.get(url, auth=self.auth, headers=self.headers).json()
- for priv in privileges['data']:
- if priv['name'] == name:
- return priv['id']
+ for priv in privileges["data"]:
+ if priv["name"] == name:
+ return priv["id"]
raise LookupError("No privilege found named '{}'".format(name))
delete
update
"""
- url = os.path.join(self.baseurl, 'privileges_target')
+ url = os.path.join(self.baseurl, "privileges_target")
privileges = {
- 'data': {
- 'name': name,
- 'description': name,
- 'method': [
- priv,
- ],
- 'repositoryGroupId': '',
- 'repositoryId': '',
- 'repositoryTargetId': target_id,
- 'type': 'target',
+ "data": {
+ "name": name,
+ "description": name,
+ "method": [priv,],
+ "repositoryGroupId": "",
+ "repositoryId": "",
+ "repositoryTargetId": target_id,
+ "type": "target",
}
}
- json_data = json.dumps(privileges).encode(encoding='latin-1')
+ json_data = json.dumps(privileges).encode(encoding="latin-1")
r = requests.post(url, auth=self.auth, headers=self.headers, data=json_data)
privileges = r.json()
if r.status_code != requests.codes.created:
raise Exception("Privilege not created for '{}', code '{}'".format(name, r.status_code))
- return privileges['data'][0]['id']
+ return privileges["data"][0]["id"]
def get_role(self, name):
"""Get the id of a role with a given name."""
- url = os.path.join(self.baseurl, 'roles')
+ url = os.path.join(self.baseurl, "roles")
roles = requests.get(url, auth=self.auth, headers=self.headers).json()
- for role in roles['data']:
- if role['name'] == name:
- return role['id']
+ for role in roles["data"]:
+ if role["name"] == name:
+ return role["id"]
# If name is not found in names, check ids
- for role in roles['data']:
- if role['id'] == name:
- return role['id']
+ for role in roles["data"]:
+ if role["id"] == name:
+ return role["id"]
raise LookupError("No role with name '{}'".format(name))
def create_role(self, name, privs, role_id="", description="", roles=[]):
"""Create a role with the given privileges."""
- url = os.path.join(self.baseurl, 'roles')
+ url = os.path.join(self.baseurl, "roles")
role = {
- 'data': {
- 'id': role_id if role_id else name,
- 'name': name,
- 'description': description if description else name,
- 'privileges': privs,
- 'roles': ['repository-any-read'] + roles,
- 'sessionTimeout': 60,
+ "data": {
+ "id": role_id if role_id else name,
+ "name": name,
+ "description": description if description else name,
+ "privileges": privs,
+ "roles": ["repository-any-read"] + roles,
+ "sessionTimeout": 60,
}
}
- json_data = json.dumps(role).encode(encoding='latin-1')
+ json_data = json.dumps(role).encode(encoding="latin-1")
log.debug("Sending role {} to Nexus".format(json_data))
- r = requests.post(url, auth=self.auth, headers=self.headers,
- data=json_data)
+ r = requests.post(url, auth=self.auth, headers=self.headers, data=json_data)
if r.status_code != requests.codes.created:
if r.status_code == 400 and "errors" in r.json().keys():
error_msgs = ""
for error in r.json()["errors"]:
error_msgs += error["msg"] + "\n"
- raise Exception("Role not created for '{}', code '{}', failed "
- "with the following errors: {}".format(
- name, r.status_code, error_msgs))
+ raise Exception(
+ "Role not created for '{}', code '{}', failed "
+ "with the following errors: {}".format(name, r.status_code, error_msgs)
+ )
else:
- raise Exception("Role not created for '{}', code '{}'".format(
- role_id, r.status_code))
+ raise Exception("Role not created for '{}', code '{}'".format(role_id, r.status_code))
- return r.json()['data']['id']
+ return r.json()["data"]["id"]
def get_user(self, user_id):
"""Determine if a user with a given userId exists."""
- url = os.path.join(self.baseurl, 'users')
+ url = os.path.join(self.baseurl, "users")
users = requests.get(url, auth=self.auth, headers=self.headers).json()
- for user in users['data']:
- if user['userId'] == user_id:
+ for user in users["data"]:
+ if user["userId"] == user_id:
return
raise LookupError("No user with id '{}'".format(user_id))
User is created with the nx-deployment role attached
"""
- url = os.path.join(self.baseurl, 'users')
+ url = os.path.join(self.baseurl, "users")
user = {
- 'data': {
- 'userId': name,
- 'email': "{}-deploy@{}".format(name, domain),
- 'firstName': name,
- 'lastName': 'Deployment',
- 'roles': [
- role_id,
- 'nx-deployment',
- ],
- 'password': password,
- 'status': 'active',
+ "data": {
+ "userId": name,
+ "email": "{}-deploy@{}".format(name, domain),
+ "firstName": name,
+ "lastName": "Deployment",
+ "roles": [role_id, "nx-deployment",],
+ "password": password,
+ "status": "active",
}
}
for role in extra_roles:
- user['data']['roles'].append(self.get_role(role))
+ user["data"]["roles"].append(self.get_role(role))
- json_data = json.dumps(user).encode(encoding='latin-1')
+ json_data = json.dumps(user).encode(encoding="latin-1")
user = requests.post(url, auth=self.auth, headers=self.headers, data=json_data)
def get_repo_group(self, name):
"""Get the repository ID for a repo group that has a specific name."""
- url = os.path.join(self.baseurl, 'repo_groups')
+ url = os.path.join(self.baseurl, "repo_groups")
repos = requests.get(url, auth=self.auth, headers=self.headers).json()
- for repo in repos['data']:
- if repo['name'] == name:
- return repo['id']
+ for repo in repos["data"]:
+ if repo["name"] == name:
+ return repo["id"]
raise LookupError("No repository group named '{}'".format(name))
def get_repo_group_details(self, repoId):
"""Get the current configuration of a given repo group with a specific ID."""
- url = os.path.join(self.baseurl, 'repo_groups', repoId)
+ url = os.path.join(self.baseurl, "repo_groups", repoId)
- return requests.get(url, auth=self.auth, headers=self.headers).json()['data']
+ return requests.get(url, auth=self.auth, headers=self.headers).json()["data"]
def update_repo_group_details(self, repoId, data):
"""Update the given repo group with new configuration."""
- url = os.path.join(self.baseurl, 'repo_groups', repoId)
+ url = os.path.join(self.baseurl, "repo_groups", repoId)
- repo = {
- 'data': data
- }
+ repo = {"data": data}
- json_data = json.dumps(repo).encode(encoding='latin-1')
+ json_data = json.dumps(repo).encode(encoding="latin-1")
requests.put(url, auth=self.auth, headers=self.headers, data=json_data)
"""Return credentials for Nexus instantiation."""
if settings_file:
try:
- with open(settings_file, 'r') as f:
+ with open(settings_file, "r") as f:
settings = yaml.safe_load(f)
except IOError:
log.error('Error reading settings file "{}"'.format(settings_file))
sys.exit(1)
- if url and set(['user', 'password']).issubset(settings):
- settings['nexus'] = url
+ if url and set(["user", "password"]).issubset(settings):
+ settings["nexus"] = url
return settings
- elif set(['nexus', 'user', 'password']).issubset(settings):
+ elif set(["nexus", "user", "password"]).issubset(settings):
return settings
elif url:
try:
auth_url = url.replace("https://", "")
user = config.get_setting(auth_url, "username")
password = config.get_setting(auth_url, "password")
- except (configparser.NoOptionError,
- configparser.NoSectionError):
- log.info("Failed to get nexus credentials; using empty username "
- "and password.")
+ except (configparser.NoOptionError, configparser.NoSectionError):
+ log.info("Failed to get nexus credentials; using empty username " "and password.")
return {"nexus": url, "user": "", "password": ""}
return {"nexus": url, "user": user, "password": password}
- log.error('Please define a settings.yaml file, or include a url if using '
- + 'lftools.ini')
+ log.error("Please define a settings.yaml file, or include a url if using " + "lftools.ini")
sys.exit(1)
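# An illustrative settings.yaml matching the keys this function expects
# (values are made up):
#   nexus: https://nexus.example.org
#   user: admin
#   password: example-password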
"""Return URL from settings file, if it exists."""
if settings_file:
try:
- with open(settings_file, 'r') as f:
+ with open(settings_file, "r") as f:
settings = yaml.safe_load(f)
except IOError:
log.error('Error reading settings file "{}"'.format(settings_file))
to be in the correct reverse sorted order. There is a problem with
Nexus where it does not do this as it should.
"""
- with open(settings_file, 'r') as f:
+ with open(settings_file, "r") as f:
settings = yaml.safe_load(f)
- for setting in ['nexus', 'user', 'password']:
+ for setting in ["nexus", "user", "password"]:
if not setting in settings:
- log.error('{} needs to be defined'.format(setting))
+ log.error("{} needs to be defined".format(setting))
sys.exit(1)
- _nexus = Nexus(settings['nexus'], settings['user'], settings['password'])
+ _nexus = Nexus(settings["nexus"], settings["user"], settings["password"])
try:
- repo_id = _nexus.get_repo_group('Staging Repositories')
+ repo_id = _nexus.get_repo_group("Staging Repositories")
except LookupError as e:
log.error("Staging repository 'Staging Repositories' cannot be found")
sys.exit(1)
repo_details = _nexus.get_repo_group_details(repo_id)
- sorted_repos = sorted(repo_details['repositories'], key=lambda k: k['id'], reverse=True)
+ sorted_repos = sorted(repo_details["repositories"], key=lambda k: k["id"], reverse=True)
for repos in sorted_repos:
- del repos['resourceURI']
- del repos['name']
+ del repos["resourceURI"]
+ del repos["name"]
repo_update = repo_details
- repo_update['repositories'] = sorted_repos
- del repo_update['contentResourceURI']
- del repo_update['repoType']
+ repo_update["repositories"] = sorted_repos
+ del repo_update["contentResourceURI"]
+ del repo_update["repoType"]
_nexus.update_repo_group_details(repo_id, repo_update)
:arg str settings: Settings file containing administrative credentials and
information.
"""
- with open(config_file, 'r') as f:
+ with open(config_file, "r") as f:
config = yaml.safe_load(f)
- with open(settings_file, 'r') as f:
+ with open(settings_file, "r") as f:
settings = yaml.safe_load(f)
- for setting in ['email_domain', 'base_groupId', 'repositories']:
+ for setting in ["email_domain", "base_groupId", "repositories"]:
if not setting in config:
- log.error('{} needs to be defined in {}'.format(setting, config_file))
+ log.error("{} needs to be defined in {}".format(setting, config_file))
sys.exit(1)
- for setting in ['nexus', 'user', 'password']:
+ for setting in ["nexus", "user", "password"]:
if not setting in settings:
- log.error('{} needs to be defined in {}'.format(setting, settings_file))
+ log.error("{} needs to be defined in {}".format(setting, settings_file))
sys.exit(1)
- _nexus = Nexus(settings['nexus'], settings['user'], settings['password'])
+ _nexus = Nexus(settings["nexus"], settings["user"], settings["password"])
def create_nexus_perms(name, targets, email, password, extra_privs=[]):
# Create target
# Create privileges
privs_set = [
- 'create',
- 'delete',
- 'read',
- 'update',
- ]
+ "create",
+ "delete",
+ "read",
+ "update",
+ ]
privs = {}
for priv in privs_set:
try:
privs[priv] = _nexus.get_priv(name, priv)
- log.info('Creating {} privileges.'.format(priv))
+ log.info("Creating {} privileges.".format(priv))
except LookupError as e:
privs[priv] = _nexus.create_priv(name, target_id, priv)
# Create Role
try:
role_id = _nexus.get_role(name)
- log.info('Role {} already exists.'.format(role_id))
+ log.info("Role {} already exists.".format(role_id))
except LookupError as e:
role_id = _nexus.create_role(name, privs)
# Create user
try:
_nexus.get_user(name)
- log.info('User {} already exists.'.format(name))
+ log.info("User {} already exists.".format(name))
except LookupError as e:
_nexus.create_user(name, email, role_id, password, extra_privs)
def build_repo(repo, repoId, config, base_groupId, global_privs, email_domain):
- log.info('-> Building for {}.{} in Nexus'.format(base_groupId, repo))
- groupId = '{}.{}'.format(base_groupId, repo)
+ log.info("-> Building for {}.{} in Nexus".format(base_groupId, repo))
+ groupId = "{}.{}".format(base_groupId, repo)
target = util.create_repo_target_regex(groupId)
+ if not global_privs and "extra_privs" not in config:
+ if not global_privs and not "extra_privs" in config:
extra_privs = []
elif global_privs:
extra_privs = global_privs
- if 'extra_privs' in config:
- extra_privs += config['extra_privs']
- log.info('Privileges for this repo:' + ', '.join(extra_privs))
- elif 'extra_privs' in config:
- extra_privs = config['extra_privs']
- log.info('Privileges for this repo:' + ', '.join(extra_privs))
-
- create_nexus_perms(
- repoId,
- [target],
- email_domain,
- config['password'],
- extra_privs)
-
- log.info('-> Finished successfully for {}.{}!!\n'.format(base_groupId, repo))
-
- if 'repositories' in config:
- for sub_repo in config['repositories']:
+ if "extra_privs" in config:
+ extra_privs += config["extra_privs"]
+ log.info("Privileges for this repo:" + ", ".join(extra_privs))
+ elif "extra_privs" in config:
+ extra_privs = config["extra_privs"]
+ log.info("Privileges for this repo:" + ", ".join(extra_privs))
+
+ create_nexus_perms(repoId, [target], email_domain, config["password"], extra_privs)
+
+ log.info("-> Finished successfully for {}.{}!!\n".format(base_groupId, repo))
+
+ if "repositories" in config:
+ for sub_repo in config["repositories"]:
sub_repo_id = "{}-{}".format(repoId, sub_repo)
- build_repo(
- sub_repo,
- sub_repo_id,
- config['repositories'][sub_repo],
- groupId,
- extra_privs,
- email_domain)
-
- log.warning('Nexus repo creation started. Aborting now could leave tasks undone!')
- if 'global_privs' in config:
- global_privs = config['global_privs']
+ build_repo(sub_repo, sub_repo_id, config["repositories"][sub_repo], groupId, extra_privs, email_domain)
+
+ log.warning("Nexus repo creation started. Aborting now could leave tasks undone!")
+ if "global_privs" in config:
+ global_privs = config["global_privs"]
else:
global_privs = []
- for repo in config['repositories']:
- build_repo(repo, repo, config['repositories'][repo],
- config['base_groupId'], global_privs, config['email_domain'])
+ for repo in config["repositories"]:
+ build_repo(
+ repo, repo, config["repositories"][repo], config["base_groupId"], global_privs, config["email_domain"]
+ )
def create_roles(config_file, settings_file):
:arg str settings: Settings file containing administrative credentials and
information.
"""
- with open(config_file, 'r') as f:
+ with open(config_file, "r") as f:
config = yaml.safe_load(f)
- with open(settings_file, 'r') as f:
+ with open(settings_file, "r") as f:
settings = yaml.safe_load(f)
- for setting in ['nexus', 'user', 'password']:
+ for setting in ["nexus", "user", "password"]:
if setting not in settings:
- log.error('{} needs to be defined in {}'.format(setting,
- settings_file))
+ log.error("{} needs to be defined in {}".format(setting, settings_file))
sys.exit(1)
- _nexus = Nexus(settings['nexus'], settings['user'], settings['password'])
+ _nexus = Nexus(settings["nexus"], settings["user"], settings["password"])
- required_settings = ['name', 'roles']
+ required_settings = ["name", "roles"]
for role in config:
for setting in required_settings:
if setting not in config[role]:
- log.error('{} not defined for role {}. Please ensure that {} '
- 'are defined for each role in {}'.format(
- setting, role, required_settings, config_file))
+ log.error(
+ "{} not defined for role {}. Please ensure that {} "
+ "are defined for each role in {}".format(setting, role, required_settings, config_file)
+ )
sys.exit(1)
subrole_ids = []
config[role]["privileges"] = []
for role in config:
- _nexus.create_role(config[role]["name"], config[role]["privileges"],
- role, config[role]["description"],
- config[role]["roles"])
+ _nexus.create_role(
+ config[role]["name"], config[role]["privileges"], role, config[role]["description"], config[role]["roles"]
+ )
def search(settings_file, url, repo, pattern):
if not url and settings_file:
url = get_url(settings_file)
if not url:
- log.error("ERROR: No Nexus URL provided. Please provide Nexus URL in "
- + "settings file or with the --server parameter.")
+ log.error(
+ "No Nexus URL provided. Please provide a Nexus URL in the "
+ "settings file or with the --server parameter."
+ )
sys.exit(1)
_nexus = Nexus(url)
:arg str csv_path: Path to write out csv file of matching images.
"""
if not images:
- log.warning("{}.{} called with empty images list".format(
- __name__, sys._getframe().f_code.co_name))
+ log.warning("{}.{} called with empty images list".format(__name__, sys._getframe().f_code.co_name))
return
count = len(images)
included_keys = images[0].keys()
if csv_path:
- with open(csv_path, 'wb') as out_file:
- dw = csv.DictWriter(out_file, fieldnames=included_keys,
- quoting=csv.QUOTE_ALL)
+ with open(csv_path, "wb") as out_file:
+ dw = csv.DictWriter(out_file, fieldnames=included_keys, quoting=csv.QUOTE_ALL)
dw.writeheader()
for image in images:
- dw.writerow({k: v for k, v in image.items() if
- k in included_keys})
+ dw.writerow({k: v for k, v in image.items() if k in included_keys})
for image in images:
- log.info("Name: {}\nVersion: {}\nID: {}\n\n".format(
- image["name"], image["version"], image["id"]))
+ log.info("Name: {}\nVersion: {}\nID: {}\n\n".format(image["name"], image["version"], image["id"]))
log.info("Found {} images matching the query".format(count))
"""
credentials = get_credentials(settings_file, url)
- _nexus = Nexus(credentials['nexus'], credentials['user'],
- credentials['password'])
+ _nexus = Nexus(credentials["nexus"], credentials["user"], credentials["password"])
for image in images:
_nexus.delete_image(image)
</stagingActivityEvent>
"""
tmp_list = []
- act_soup = bs4.BeautifulSoup(str(act), 'xml')
+ act_soup = bs4.BeautifulSoup(str(act), "xml")
stagingProperties = act_soup.find_all("stagingProperty")
for stagingProperty in stagingProperties:
value = stagingProperty.find("value")
tmp_list.append(value.text)
- txt_str = ' --> '.join(map(str, tmp_list))
+ txt_str = " --> ".join(map(str, tmp_list))
return txt_str
:arg flag --verify-only: Only verify repo and exit.
"""
credentials = get_credentials(None, nexus_url)
- _nexus = Nexus(credentials['nexus'], credentials['user'],
- credentials['password'])
+ _nexus = Nexus(credentials["nexus"], credentials["user"], credentials["password"])
for repo in repos:
# Verify repo before releasing
response = requests.get(activity_url, auth=_nexus.auth)
if response.status_code != 200:
- raise requests.HTTPError("Verification of repo failed with the following error:"
- "\n{}: {}".format(response.status_code, response.text))
+ raise requests.HTTPError(
+ "Verification of repo failed with the following error:"
+ "\n{}: {}".format(response.status_code, response.text)
+ )
- soup = bs4.BeautifulSoup(response.text, 'xml')
+ soup = bs4.BeautifulSoup(response.text, "xml")
values = soup.find_all("value")
activities = soup.find_all("stagingActivityEvent")
failures = []
for act in activities:
# Check for failures
- if re.search('ruleFailed', act.text):
+ if re.search("ruleFailed", act.text):
failures2.append(get_activity_text(act))
- if re.search('repositoryCloseFailed', act.text):
+ if re.search("repositoryCloseFailed", act.text):
failures2.append(get_activity_text(act))
# Check if already released
- if re.search('repositoryReleased', act.text):
+ if re.search("repositoryReleased", act.text):
successes.append(get_activity_text(act))
# Check if already Closed
- if re.search('repositoryClosed', act.text):
+ if re.search("repositoryClosed", act.text):
is_repo_closed.append(get_activity_text(act))
# Check for other failures (old code part). Only add them if not already there.
# Should be possible to remove this part, but could not find a sample XML with these values.
for message in values:
- if re.search('StagingRulesFailedException', message.text):
+ if re.search("StagingRulesFailedException", message.text):
if add_str_if_not_exist(message, failures2):
failures.append(message.text)
- if re.search('Invalid', message.text):
+ if re.search("Invalid", message.text):
if add_str_if_not_exist(message, failures2):
failures.append(message.text)
# Start check result
if len(failures) != 0 or len(failures2) != 0:
- log.info('\n'.join(map(str, failures2)))
- log.info('\n'.join(map(str, failures)))
+ log.info("\n".join(map(str, failures2)))
+ log.info("\n".join(map(str, failures)))
log.info("One or more rules failed")
sys.exit(1)
else:
log.info("PASS: No rules have failed")
if len(successes) != 0:
- log.info('\n'.join(map(str, successes)))
+ log.info("\n".join(map(str, successes)))
log.info("Nothing to do: Repository already released")
sys.exit(0)
response = requests.post(request_url, json=data, auth=_nexus.auth)
if response.status_code != 201:
- raise requests.HTTPError("Release failed with the following error:"
- "\n{}: {}".format(response.status_code,
- response.text))
+ raise requests.HTTPError(
+ "Release failed with the following error:" "\n{}: {}".format(response.status_code, response.text)
+ )
else:
log.info("Successfully released {}".format(str(repo)))
while closed is False:
response = requests.get(activity_url, auth=_nexus.auth).text
root = et.fromstring(response) # nosec
- events = root.findall('./stagingActivity')
+ events = root.findall("./stagingActivity")
for event in events:
- name = event.find('name')
+ name = event.find("name")
if name.text == "close":
- stopped = event.find('stopped')
- log.info('Repo closed at: {}'.format(stopped.text))
+ stopped = event.find("stopped")
+ log.info("Repo closed at: {}".format(stopped.text))
closed = True
else:
- log.info('Repo is not fully closed, sleeping for five minutes.')
+ log.info("Repo is not fully closed, sleeping for five minutes.")
sleep(300)
def _remove_http_from_url(url):
"""Remove http[s]:// from url."""
- if url.startswith('https://'):
- return url[len('https://'):]
- if url.startswith('http://'):
- return url[len('http://'):]
+ if url.startswith("https://"):
+ return url[len("https://") :]
+ if url.startswith("http://"):
+ return url[len("http://") :]
return url
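
The space before the colon in url[len("https://") :] is Black's PEP 8 treatment
of slices whose bounds are complex expressions; behavior is unchanged. A quick
sanity check using the function defined above (hostnames are placeholders):

assert _remove_http_from_url("https://nexus3.example.org") == "nexus3.example.org"
assert _remove_http_from_url("http://nexus3.example.org") == "nexus3.example.org"
assert _remove_http_from_url("nexus3.example.org") == "nexus3.example.org"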
def _format_image_id(id):
"""Remove sha256: from beginning of string."""
if id.startswith("sha256:"):
- return id[len('sha256:'):]
+ return id[len("sha256:") :]
else:
return id
where keyword = STAGING or SNAPSHOT
'^\d+.\d+.\d+-(STAGING|SNAPSHOT)-(20\d{2})(\d{2})(\d{2})T([01]\d|2[0-3])([0-5]\d)([0-5]\d)Z$'
"""
- pattern = re.compile(r'^\d+.\d+.\d+$')
+ pattern = re.compile(r"^\d+.\d+.\d+$")
log.debug("validate tag {} in {} --> {}".format(check_tag, self.repo, pattern.match(check_tag)))
return pattern.match(check_tag)
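
An illustration of the two validators described in the docstring; note the
unescaped dots in the release pattern also match arbitrary characters, so it is
slightly looser than it looks. The tags below are made up:

import re

released = re.compile(r"^\d+.\d+.\d+$")
staging = re.compile(
    r"^\d+.\d+.\d+-(STAGING|SNAPSHOT)-(20\d{2})(\d{2})(\d{2})T([01]\d|2[0-3])([0-5]\d)([0-5]\d)Z$"
)
print(bool(released.match("1.2.3")))  # True
print(bool(staging.match("1.2.3-STAGING-20200131T235959Z")))  # True
print(bool(released.match("latest")))  # False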
self.invalid.append(new_tag)
-class NexusTagClass (TagClass):
+class NexusTagClass(TagClass):
"""Nexus Tag class.
This class fetches and stores all Nexus3 tags for a repository.
self.repository_exist = False
return
- log.debug("r.status_code = {}, ok={}".format(
- r.status_code, r.status_code == requests.codes.ok))
+ log.debug("r.status_code = {}, ok={}".format(r.status_code, r.status_code == requests.codes.ok))
if r.status_code == requests.codes.ok:
raw_tags = r.text
- raw_tags = raw_tags.replace('"', '')
- raw_tags = raw_tags.replace('}', '')
- raw_tags = raw_tags.replace(']', '')
- raw_tags = raw_tags.replace(' ', '')
- raw_tags = raw_tags.split('[')
- TmpSplittedTags = raw_tags[1].split(',')
+ raw_tags = raw_tags.replace('"', "")
+ raw_tags = raw_tags.replace("}", "")
+ raw_tags = raw_tags.replace("]", "")
+ raw_tags = raw_tags.replace(" ", "")
+ raw_tags = raw_tags.split("[")
+ TmpSplittedTags = raw_tags[1].split(",")
if len(TmpSplittedTags) > 0:
for tag_2_add in TmpSplittedTags:
self.add_tag(tag_2_add)
- log.debug("Nexus {}/{} has tag {}".format(
- org_name, repo_name, tag_2_add))
+ log.debug("Nexus {}/{} has tag {}".format(org_name, repo_name, tag_2_add))
else:
self.repository_exist = False
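
The replace/split chain above unpacks a JSON tag list by hand. As a point of
comparison only (not what the class does), the same extraction with json.loads,
using a made-up registry v2 tags payload:

import json

raw_tags = '{"name": "onap/demo", "tags": ["1.0.0", "1.0.1", "latest"]}'  # illustrative
for tag in json.loads(raw_tags)["tags"]:
    print(tag)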
-class DockerTagClass (TagClass):
+class DockerTagClass(TagClass):
"""Docker tag class.
This class fetches and stores all docker tags for a repository.
self.repository_exist = False
return
- log.debug("r.status_code = {}, ok={}".format(
- r.status_code, r.status_code == requests.codes.ok))
+ log.debug("r.status_code = {}, ok={}".format(r.status_code, r.status_code == requests.codes.ok))
if r.status_code == requests.codes.ok:
raw_tags = r.text
- raw_tags = raw_tags.replace('}]', '')
- raw_tags = raw_tags.replace('[{', '')
- raw_tags = raw_tags.replace('{', '')
- raw_tags = raw_tags.replace('"', '')
- raw_tags = raw_tags.replace(' ', '')
- TmpSplittedTuple = raw_tags.split('}')
+ raw_tags = raw_tags.replace("}]", "")
+ raw_tags = raw_tags.replace("[{", "")
+ raw_tags = raw_tags.replace("{", "")
+ raw_tags = raw_tags.replace('"', "")
+ raw_tags = raw_tags.replace(" ", "")
+ TmpSplittedTuple = raw_tags.split("}")
if len(TmpSplittedTuple) > 0:
for tuple in TmpSplittedTuple:
- tmp_tuple = tuple.split(':')
+ tmp_tuple = tuple.split(":")
if len(tmp_tuple) > 1:
self.add_tag(tmp_tuple[2].strip())
- log.debug("Docker {}/{} has tag {}".format(
- org_name, repo_name, tmp_tuple[2]))
+ log.debug("Docker {}/{} has tag {}".format(org_name, repo_name, tmp_tuple[2]))
else:
self.repository_exist = False
        The Docker repository name is based on the Nexus3 repo name,
        but with all '/' replaced by '-'.
"""
- self.docker_repo_name = self.nexus_repo_name.replace('/', '-')
- log.debug("ProjName = {} ---> Docker name = {}".format(
- self.nexus_repo_name, self.docker_repo_name))
+ self.docker_repo_name = self.nexus_repo_name.replace("/", "-")
+ log.debug("ProjName = {} ---> Docker name = {}".format(self.nexus_repo_name, self.docker_repo_name))
def _populate_tags_to_copy(self):
"""Populate tags_to_copy list.
Check that all valid Nexus3 tags are among the Docker Hub valid tags.
If not, add them to the tags_2_copy list.
"""
- log.debug('Populate {} has valid Nexus3 {} and valid Docker Hub {}'.format(
- self.docker_repo_name,
- len(self.nexus_tags.valid), len(self.docker_tags.valid)))
+ log.debug(
+ "Populate {} has valid Nexus3 {} and valid Docker Hub {}".format(
+ self.docker_repo_name, len(self.nexus_tags.valid), len(self.docker_tags.valid)
+ )
+ )
if len(self.nexus_tags.valid) > 0:
for nexustag in self.nexus_tags.valid:
                if nexustag not in self.docker_tags.valid:
- log.debug('Need to copy tag {} from {}'.format(nexustag, self.nexus_repo_name))
+ log.debug("Need to copy tag {} from {}".format(nexustag, self.nexus_repo_name))
self.tags_2_copy.add_tag(nexustag)
- def _pull_tag_push_msg(self, info_text, count, retry_text='', progbar=False):
+ def _pull_tag_push_msg(self, info_text, count, retry_text="", progbar=False):
"""Print a formated message using log.info."""
- due_to_txt = ''
+ due_to_txt = ""
if len(retry_text) > 0:
- due_to_txt = 'due to {}'.format(retry_text)
- _attempt_str = 'Attempt '
- b4_txt_template = _attempt_str + '{:2d}'
- b4_txt = ''.ljust(len(_attempt_str)+2)
+ due_to_txt = "due to {}".format(retry_text)
+ _attempt_str = "Attempt "
+ b4_txt_template = _attempt_str + "{:2d}"
+ b4_txt = "".ljust(len(_attempt_str) + 2)
if count > 1:
b4_txt = b4_txt_template.format(count)
if progbar:
else:
log.info("{}: {} {}".format(b4_txt, info_text, due_to_txt))
- def _docker_pull(self, nexus_image_str, count, tag, retry_text='', progbar=False):
+ def _docker_pull(self, nexus_image_str, count, tag, retry_text="", progbar=False):
"""Pull an image from Nexus."""
- self._pull_tag_push_msg('Pulling Nexus3 image {} with tag {}'.format(
- self.calc_nexus_project_name(), tag), count, retry_text)
+ self._pull_tag_push_msg(
+ "Pulling Nexus3 image {} with tag {}".format(self.calc_nexus_project_name(), tag), count, retry_text
+ )
image = self.docker_client.images.pull(nexus_image_str)
return image
- def _docker_tag(self, count, image, tag, retry_text='', progbar=False):
+ def _docker_tag(self, count, image, tag, retry_text="", progbar=False):
"""Tag the image with proper docker name and version."""
- self._pull_tag_push_msg('Creating docker image {} with tag {}'.format(
- self.calc_docker_project_name(), tag), count, retry_text)
+ self._pull_tag_push_msg(
+ "Creating docker image {} with tag {}".format(self.calc_docker_project_name(), tag), count, retry_text
+ )
image.tag(self.calc_docker_project_name(), tag=tag)
def _docker_push(self, count, image, tag, retry_text, progbar=False):
"""Push the docker image to Docker Hub."""
- self._pull_tag_push_msg('Pushing docker image {} with tag {}'.format(
- self.calc_docker_project_name(), tag), count, retry_text)
+ self._pull_tag_push_msg(
+ "Pushing docker image {} with tag {}".format(self.calc_docker_project_name(), tag), count, retry_text
+ )
self.docker_client.images.push(self.calc_docker_project_name(), tag=tag)
- def _docker_cleanup(self, count, image, tag, retry_text='', progbar=False):
+ def _docker_cleanup(self, count, image, tag, retry_text="", progbar=False):
"""Remove the local copy of the image."""
image_id = _format_image_id(image.short_id)
- self._pull_tag_push_msg('Cleanup docker image {} with tag {} and id {}'.format(
- self.calc_docker_project_name(), tag, image_id), count, retry_text)
+ self._pull_tag_push_msg(
+ "Cleanup docker image {} with tag {} and id {}".format(self.calc_docker_project_name(), tag, image_id),
+ count,
+ retry_text,
+ )
self.docker_client.images.remove(image.id, force=True)
def docker_pull_tag_push(self, progbar=False):
for tag in self.tags_2_copy.valid:
org_path = _remove_http_from_url(NEXUS3_BASE)
- nexus_image_str = '{}/{}/{}:{}'.format(org_path, self.org_name, self.nexus_repo_name, tag)
+ nexus_image_str = "{}/{}/{}:{}".format(org_path, self.org_name, self.nexus_repo_name, tag)
log.debug("Nexus Image Str = {}".format(nexus_image_str))
- for stage in ['pull', 'tag', 'push', 'cleanup']:
+ for stage in ["pull", "tag", "push", "cleanup"]:
cnt_break_loop = 1
- retry_text = ''
- while (True):
+ retry_text = ""
+ while True:
try:
- log.debug('stage = {}. cnt_break_loop {}, reason {}'.format(stage, cnt_break_loop, retry_text))
- if stage == 'pull':
+ log.debug("stage = {}. cnt_break_loop {}, reason {}".format(stage, cnt_break_loop, retry_text))
+ if stage == "pull":
image = self._docker_pull(nexus_image_str, cnt_break_loop, tag, retry_text, progbar)
break
- if stage == 'tag':
+ if stage == "tag":
self._docker_tag(cnt_break_loop, image, tag, retry_text, progbar)
break
- if stage == 'push':
+ if stage == "push":
self._docker_push(cnt_break_loop, image, tag, retry_text, progbar)
break
- if stage == 'cleanup':
+ if stage == "cleanup":
self._docker_cleanup(cnt_break_loop, image, tag, retry_text, progbar)
break
except socket.timeout:
- retry_text = 'Socket Timeout'
+ retry_text = "Socket Timeout"
except requests.exceptions.ConnectionError:
- retry_text = 'Connection Error'
+ retry_text = "Connection Error"
except urllib3.exceptions.ReadTimeoutError:
- retry_text = 'Read Timeout Error'
+ retry_text = "Read Timeout Error"
except docker.errors.APIError:
- retry_text = 'API Error'
+ retry_text = "API Error"
cnt_break_loop = cnt_break_loop + 1
- if (cnt_break_loop > 90):
+ if cnt_break_loop > 90:
raise requests.HTTPError(retry_text)
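
Each stage above is retried up to 90 times, with the exception text recorded so
the final HTTPError explains the last failure. A condensed sketch of the
pattern, with generic exceptions standing in for the socket/requests/urllib3/
docker errors (like the loop above, it retries immediately, without sleeping):

def run_with_retries(stage_fn, max_attempts=90):
    """Re-run stage_fn until it succeeds or attempts run out."""
    retry_text = ""
    for attempt in range(1, max_attempts + 1):
        try:
            return stage_fn()
        except (OSError, RuntimeError) as exc:  # stand-ins for the real errors
            retry_text = str(exc)
    raise RuntimeError(retry_text)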
-def get_nexus3_catalog(org_name='', find_pattern='', exact_match=False):
+def get_nexus3_catalog(org_name="", find_pattern="", exact_match=False):
"""Main function to collect all Nexus3 repositories.
This function will collect the Nexus catalog for all projects starting with
global project_max_len_chars
project_max_len_chars = 0
- containing_str = ''
+ containing_str = ""
if len(find_pattern) > 0:
containing_str = ', and containing "{}"'.format(find_pattern)
if exact_match:
log.debug("r.status_code = {}, ok={}".format(r.status_code, r.status_code == requests.codes.ok))
if r.status_code == requests.codes.ok:
raw_catalog = r.text
- raw_catalog = raw_catalog.replace('"', '')
- raw_catalog = raw_catalog.replace(' ', '')
- raw_catalog = raw_catalog.replace('}', '')
- raw_catalog = raw_catalog.replace('[', '')
- raw_catalog = raw_catalog.replace(']', '')
- raw_catalog = raw_catalog.split(':')
- TmpCatalog = raw_catalog[1].split(',')
+ raw_catalog = raw_catalog.replace('"', "")
+ raw_catalog = raw_catalog.replace(" ", "")
+ raw_catalog = raw_catalog.replace("}", "")
+ raw_catalog = raw_catalog.replace("[", "")
+ raw_catalog = raw_catalog.replace("]", "")
+ raw_catalog = raw_catalog.split(":")
+ TmpCatalog = raw_catalog[1].split(",")
for word in TmpCatalog:
# Remove all projects that do not start with org_name
if word.startswith(org_name):
use_this_repo = False
# Remove org_name/ from word, so we only get repository left
- project = (org_name, word[len(org_name)+1:])
+ project = (org_name, word[len(org_name) + 1 :])
# If a specific search string has been specified, search for it
# Empty string will match all words
if word.find(find_pattern) >= 0 and not exact_match:
log.debug("Added project {} to my list".format(project[1]))
if len(project[1]) > project_max_len_chars:
project_max_len_chars = len(project[1])
- log.debug("# TmpCatalog {}, NexusCatalog {}, DIFF = {}".format(
- len(TmpCatalog), len(NexusCatalog), len(TmpCatalog)-len(NexusCatalog)))
+ log.debug(
+ "# TmpCatalog {}, NexusCatalog {}, DIFF = {}".format(
+ len(TmpCatalog), len(NexusCatalog), len(TmpCatalog) - len(NexusCatalog)
+ )
+ )
return True
NbrProjects = len(NexusCatalog)
log.info("Fetching tags from Nexus3 and Docker Hub for {} projects".format(NbrProjects))
if progbar:
- pbar = tqdm.tqdm(total=NbrProjects, bar_format='{l_bar}{bar}|{n_fmt}/{total_fmt} [{elapsed}]')
+ pbar = tqdm.tqdm(total=NbrProjects, bar_format="{l_bar}{bar}|{n_fmt}/{total_fmt} [{elapsed}]")
def _fetch_all_tags(proj):
"""Helper function for multi-threading.
_tot_tags = _tot_tags + len(proj.tags_2_copy.valid)
log.info("About to start copying from Nexus3 to Docker Hub for {} missing tags".format(_tot_tags))
if progbar:
- pbar = tqdm.tqdm(total=_tot_tags, bar_format='{l_bar}{bar}|{n_fmt}/{total_fmt} [{elapsed}]')
+ pbar = tqdm.tqdm(total=_tot_tags, bar_format="{l_bar}{bar}|{n_fmt}/{total_fmt} [{elapsed}]")
def _docker_pull_tag_push(proj):
"""Helper function for multi-threading.
def print_nexus_docker_proj_names():
"""Print Nexus3 - Docker Hub repositories."""
- fmt_str = '{:<'+str(project_max_len_chars)+'} : '
+ fmt_str = "{:<" + str(project_max_len_chars) + "} : "
log.info("")
log_str = fmt_str.format(NEXUS3_PROJ_NAME_HEADER)
log_str = "{}{}".format(log_str, DOCKER_PROJ_NAME_HEADER)
log.info(log_str)
- log.info('-'*project_max_len_chars*2)
+ log.info("-" * project_max_len_chars * 2)
docker_i = 0
for proj in projects:
log_str = fmt_str.format(proj.nexus_repo_name)
def print_tags_header(header_str, col_1_str):
"""Print simple header."""
- fmt_str = '{:<'+str(project_max_len_chars)+'} : '
+ fmt_str = "{:<" + str(project_max_len_chars) + "} : "
log.info(header_str)
log_str = fmt_str.format(col_1_str)
- log_str = "{}{}".format(log_str, 'Tags')
+ log_str = "{}{}".format(log_str, "Tags")
log.info(log_str)
- log.info('-'*project_max_len_chars*2)
+ log.info("-" * project_max_len_chars * 2)
def print_tags_data(proj_name, tags):
"""Print tag data."""
- fmt_str = '{:<'+str(project_max_len_chars)+'} : '
+ fmt_str = "{:<" + str(project_max_len_chars) + "} : "
if len(tags) > 0:
log_str = fmt_str.format(proj_name)
tag_i = 0
def print_stats():
"""Print simple repo/tag statistics."""
print_tags_header("Tag statistics (V=Valid, I=InValid)", NEXUS3_PROJ_NAME_HEADER)
- fmt_str = '{:<'+str(project_max_len_chars)+'} : '
+ fmt_str = "{:<" + str(project_max_len_chars) + "} : "
for proj in projects:
- log.info("{}Nexus V:{} I:{} -- Docker V:{} I:{}".format(
- fmt_str.format(proj.nexus_repo_name),
- len(proj.nexus_tags.valid),
- len(proj.nexus_tags.invalid),
- len(proj.docker_tags.valid),
- len(proj.docker_tags.invalid)))
+ log.info(
+ "{}Nexus V:{} I:{} -- Docker V:{} I:{}".format(
+ fmt_str.format(proj.nexus_repo_name),
+ len(proj.nexus_tags.valid),
+ len(proj.nexus_tags.invalid),
+ len(proj.docker_tags.valid),
+ len(proj.docker_tags.invalid),
+ )
+ )
log.info("")
def print_missing_docker_proj():
"""Print missing docker repos."""
log.info("Missing corresponding Docker Project")
- fmt_str = '{:<'+str(project_max_len_chars)+'} : '
+ fmt_str = "{:<" + str(project_max_len_chars) + "} : "
log_str = fmt_str.format(NEXUS3_PROJ_NAME_HEADER)
log_str = "{}{}".format(log_str, DOCKER_PROJ_NAME_HEADER)
log.info(log_str)
- log.info('-'*project_max_len_chars*2)
+ log.info("-" * project_max_len_chars * 2)
all_docker_repos_found = True
for proj in projects:
if not proj.docker_tags.repository_exist:
def print_nexus_tags_to_copy():
"""Print tags that needs to be copied."""
log.info("Nexus project tags to copy to docker")
- fmt_str = '{:<'+str(project_max_len_chars)+'} : '
+ fmt_str = "{:<" + str(project_max_len_chars) + "} : "
log_str = fmt_str.format(NEXUS3_PROJ_NAME_HEADER)
log_str = "{}{}".format(log_str, "Tags to copy")
log.info(log_str)
- log.info('-'*project_max_len_chars*2)
+ log.info("-" * project_max_len_chars * 2)
for proj in projects:
if len(proj.tags_2_copy.valid) > 0:
log_str = ""
log.info("Summary: {} tags that should be copied from Nexus3 to Docker Hub.".format(_tot_tags))
-def start_point(org_name, find_pattern='', exact_match=False, summary=False,
- verbose=False, copy=False, progbar=False):
+def start_point(org_name, find_pattern="", exact_match=False, summary=False, verbose=False, copy=False, progbar=False):
"""Main function."""
# Verify find_pattern and specified_repo are not both used.
if len(find_pattern) == 0 and exact_match:
##############################################################################
"""Utility functions for Nexus."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
import logging
def create_repo_target_regex(group_id):
"""Create a repo_target for Nexus use."""
- return '^/{}/.*'.format(group_id.replace('.', '[/\.]'))
+ return "^/{}/.*".format(group_id.replace(".", "[/\.]"))
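
For example (group id invented), each dot in the group id becomes a character
class matching either a path separator or a literal dot, so the target matches
the group's path layout in a Maven repository:

print(create_repo_target_regex("org.example.project"))
# ^/org[/\.]example[/\.]project/.*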
refresh_token=refresh_token,
token_expiry=None,
token_uri=token_uri,
- user_agent=None)
+ user_agent=None,
+ )
credentials.refresh(httplib2.Http())
access_token = credentials.access_token
return access_token, url
##############################################################################
"""lftools openstack package."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
##############################################################################
"""CLI configuration for openstack command."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
import click
@click.group()
-@click.option('--os-cloud', envvar='OS_CLOUD', type=str, required=True)
+@click.option("--os-cloud", envvar="OS_CLOUD", type=str, required=True)
@click.pass_context
def openstack(ctx, os_cloud):
"""Provide an interface to OpenStack."""
- ctx.obj['os_cloud'] = os_cloud
+ ctx.obj["os_cloud"] = os_cloud
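
A minimal, self-contained sketch of the ctx.obj hand-off used here: the group
stores --os-cloud and every subcommand reads it back. Command and cloud names
are made up; ensure_object is an addition for standalone use, since lftools
passes obj={} when invoking:

import click

@click.group()
@click.option("--os-cloud", envvar="OS_CLOUD", required=True)
@click.pass_context
def cli(ctx, os_cloud):
    ctx.ensure_object(dict)  # create the shared dict when run standalone
    ctx.obj["os_cloud"] = os_cloud

@cli.command()
@click.pass_context
def whoami(ctx):
    click.echo(ctx.obj["os_cloud"])  # subcommands read the stored value

# OS_CLOUD=mycloud python demo.py whoami  ->  mycloud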
@openstack.group()
@click.command()
@click.option(
- '--ci-managed', type=bool, default=True,
- help='Filter only images that have the ci_managed=yes metadata set.')
+ "--ci-managed", type=bool, default=True, help="Filter only images that have the ci_managed=yes metadata set."
+)
+@click.option("--days", type=int, default=0, help="Find images older than or equal to days.")
+@click.option("--hide-public", type=bool, default=False, help="Ignore public images.")
@click.option(
- '--days', type=int, default=0,
- help='Find images older than or equal to days.')
-@click.option(
- '--hide-public', type=bool, default=False,
- help='Ignore public images.')
-@click.option(
- '--clouds', type=str, default=None,
- help=('Clouds (as defined in clouds.yaml) to remove images from. If not'
- 'passed will assume from os-cloud parameter. (optional)'))
+ "--clouds",
+ type=str,
+ default=None,
+ help=(
+        "Clouds (as defined in clouds.yaml) to remove images from. If not "
+        "passed, will assume from os-cloud parameter. (optional)"
+ ),
+)
@click.pass_context
def cleanup(ctx, days, hide_public, ci_managed, clouds):
"""Cleanup old images."""
- os_image.cleanup(
- ctx.obj['os_cloud'],
- ci_managed=ci_managed,
- days=days,
- hide_public=hide_public,
- clouds=clouds)
+ os_image.cleanup(ctx.obj["os_cloud"], ci_managed=ci_managed, days=days, hide_public=hide_public, clouds=clouds)
@click.command()
@click.option(
- '--ci-managed', type=bool, default=True,
- help='Filter only images that have the ci_managed=yes metadata set.')
-@click.option(
- '--days', type=int, default=0,
- help='Find images older than or equal to days.')
-@click.option(
- '--hide-public', type=bool, default=False,
- help='Ignore public images.')
+ "--ci-managed", type=bool, default=True, help="Filter only images that have the ci_managed=yes metadata set."
+)
+@click.option("--days", type=int, default=0, help="Find images older than or equal to days.")
+@click.option("--hide-public", type=bool, default=False, help="Ignore public images.")
@click.pass_context
def list(ctx, days, hide_public, ci_managed):
"""List cloud images."""
- os_image.list(
- ctx.obj['os_cloud'],
- ci_managed=ci_managed,
- days=days,
- hide_public=hide_public)
+ os_image.list(ctx.obj["os_cloud"], ci_managed=ci_managed, days=days, hide_public=hide_public)
@click.command()
-@click.argument('image')
-@click.argument('dest', nargs=-1)
+@click.argument("image")
+@click.argument("dest", nargs=-1)
@click.pass_context
def share(ctx, image, dest):
"""Share image with another tenant."""
- os_image.share(ctx.obj['os_cloud'], image, dest)
+ os_image.share(ctx.obj["os_cloud"], image, dest)
@click.command()
-@click.argument('image')
-@click.argument('name', nargs=-1, required=True)
-@click.option(
- '--disk-format', type=str, default='qcow2',
- help='Disk format of image. (default: qcow2)')
+@click.argument("image")
+@click.argument("name", nargs=-1, required=True)
+@click.option("--disk-format", type=str, default="qcow2", help="Disk format of image. (default: qcow2)")
@click.pass_context
def upload(ctx, image, name, disk_format):
"""Upload image to OpenStack cloud."""
- name = ' '.join(name)
- os_image.upload(ctx.obj['os_cloud'], image, name, disk_format)
+ name = " ".join(name)
+ os_image.upload(ctx.obj["os_cloud"], image, name, disk_format)
image.add_command(cleanup)
@click.pass_context
def list_containers(ctx):
"""List available containers."""
- os_object.list_containers(ctx.obj['os_cloud'])
+ os_object.list_containers(ctx.obj["os_cloud"])
object.add_command(list_containers)
@click.command()
-@click.option(
- '--days', type=int, default=0,
- help='Find servers older than or equal to days.')
+@click.option("--days", type=int, default=0, help="Find servers older than or equal to days.")
@click.pass_context
def cleanup(ctx, days):
"""Cleanup old servers."""
- os_server.cleanup(
- ctx.obj['os_cloud'],
- days=days)
+ os_server.cleanup(ctx.obj["os_cloud"], days=days)
@click.command()
-@click.option(
- '--days', type=int, default=0,
- help='Find servers older than or equal to days.')
+@click.option("--days", type=int, default=0, help="Find servers older than or equal to days.")
@click.pass_context
def list(ctx, days):
"""List cloud servers."""
- os_server.list(
- ctx.obj['os_cloud'],
- days=days)
+ os_server.list(ctx.obj["os_cloud"], days=days)
@click.command()
-@click.argument('server')
-@click.option(
- '--minutes', type=int, default=0,
- help='Delete server if older than x minutes.')
+@click.argument("server")
+@click.option("--minutes", type=int, default=0, help="Delete server if older than x minutes.")
@click.pass_context
def remove(ctx, server, minutes):
"""Remove servers."""
- os_server.remove(
- ctx.obj['os_cloud'],
- server_name=server,
- minutes=minutes)
+ os_server.remove(ctx.obj["os_cloud"], server_name=server, minutes=minutes)
server.add_command(cleanup)
@click.command()
-@click.argument('name')
-@click.argument('template_file')
-@click.argument('parameter_file')
-@click.option(
- '--timeout', type=int, default=900,
- help='Stack create timeout in seconds.')
-@click.option(
- '--tries', type=int, default=2,
- help='Number of tries before giving up.')
+@click.argument("name")
+@click.argument("template_file")
+@click.argument("parameter_file")
+@click.option("--timeout", type=int, default=900, help="Stack create timeout in seconds.")
+@click.option("--tries", type=int, default=2, help="Number of tries before giving up.")
@click.pass_context
def create(ctx, name, template_file, parameter_file, timeout, tries):
"""Create stack."""
- os_stack.create(
- ctx.obj['os_cloud'],
- name,
- template_file,
- parameter_file,
- timeout,
- tries)
+ os_stack.create(ctx.obj["os_cloud"], name, template_file, parameter_file, timeout, tries)
@click.command()
-@click.argument('name_or_id')
-@click.option(
- '--force', type=bool, is_flag=True, default=False,
- help='Ignore timeout and continue with next stack.')
-@click.option(
- '--timeout', type=int, default=900,
- help='Stack delete timeout in seconds.')
+@click.argument("name_or_id")
+@click.option("--force", type=bool, is_flag=True, default=False, help="Ignore timeout and continue with next stack.")
+@click.option("--timeout", type=int, default=900, help="Stack delete timeout in seconds.")
@click.pass_context
def delete(ctx, name_or_id, force, timeout):
"""Delete stack."""
- os_stack.delete(
- ctx.obj['os_cloud'],
- name_or_id,
- force=force,
- timeout=timeout)
+ os_stack.delete(ctx.obj["os_cloud"], name_or_id, force=force, timeout=timeout)
@click.command()
-@click.argument('stack_name')
+@click.argument("stack_name")
@click.pass_context
def cost(ctx, stack_name):
"""Get Total Stack Cost."""
- os_stack.cost(
- ctx.obj['os_cloud'],
- stack_name)
+ os_stack.cost(ctx.obj["os_cloud"], stack_name)
-@click.command(name='delete-stale')
-@click.argument('jenkins_urls', nargs=-1)
+@click.command(name="delete-stale")
+@click.argument("jenkins_urls", nargs=-1)
@click.pass_context
def delete_stale(ctx, jenkins_urls):
"""Delete stale stacks.
both places. If a stack is no longer available in Jenkins but is in
OpenStack then it is considered stale. Stale stacks are then deleted.
"""
- os_stack.delete_stale(
- ctx.obj['os_cloud'],
- jenkins_urls)
+ os_stack.delete_stale(ctx.obj["os_cloud"], jenkins_urls)
stack.add_command(create)
@click.command()
-@click.option(
- '--days', type=int, default=0,
- help='Find volumes older than or equal to days.')
+@click.option("--days", type=int, default=0, help="Find volumes older than or equal to days.")
@click.pass_context
def cleanup(ctx, days):
"""Cleanup old volumes."""
- os_volume.cleanup(
- ctx.obj['os_cloud'],
- days=days)
+ os_volume.cleanup(ctx.obj["os_cloud"], days=days)
@click.command()
-@click.option(
- '--days', type=int, default=0,
- help='Find volumes older than or equal to days.')
+@click.option("--days", type=int, default=0, help="Find volumes older than or equal to days.")
@click.pass_context
def list(ctx, days):
"""List cloud volumes."""
- os_volume.list(
- ctx.obj['os_cloud'],
- days=days)
+ os_volume.list(ctx.obj["os_cloud"], days=days)
@click.command()
-@click.argument('volume_id')
-@click.option(
- '--minutes', type=int, default=0,
- help='Delete volumes if older than x minutes.')
+@click.argument("volume_id")
+@click.option("--minutes", type=int, default=0, help="Delete volumes if older than x minutes.")
@click.pass_context
def remove(ctx, volume_id, minutes):
"""Remove volumes."""
- os_volume.remove(
- ctx.obj['os_cloud'],
- volume_id=volume_id,
- minutes=minutes)
+ os_volume.remove(ctx.obj["os_cloud"], volume_id=volume_id, minutes=minutes)
volume.add_command(cleanup)
##############################################################################
"""Image related sub-commands for openstack command."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
from datetime import datetime
from datetime import timedelta
for image in images:
if hide_public and image.is_public:
continue
- if ci_managed and image.metadata.get('ci_managed', None) != 'yes':
+ if ci_managed and image.metadata.get("ci_managed", None) != "yes":
continue
if image.protected:
continue
if days and (
- datetime.strptime(image.created_at, '%Y-%m-%dT%H:%M:%SZ')
- >= datetime.now() - timedelta(days=days)):
+ datetime.strptime(image.created_at, "%Y-%m-%dT%H:%M:%SZ") >= datetime.now() - timedelta(days=days)
+ ):
continue
filtered.append(image)
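
The filter keeps only images at least `days` old: anything created after
now - days is skipped. The cutoff arithmetic in isolation (timestamp invented,
format matching the Glance created_at field used above):

from datetime import datetime, timedelta

created_at = "2020-01-01T12:00:00Z"  # illustrative
days = 30
created = datetime.strptime(created_at, "%Y-%m-%dT%H:%M:%SZ")
old_enough = created < datetime.now() - timedelta(days=days)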
:arg str clouds: If passed, comma-separated list of clouds to remove image
from. Otherwise os_cloud will be used.
"""
+
def _remove_images_from_cloud(images, cloud):
- print('Removing {} images from {}.'.format(len(images), cloud.cloud_config.name))
+ print("Removing {} images from {}.".format(len(images), cloud.cloud_config.name))
for image in images:
if image.is_protected:
- print('WARNING: Image {} is protected. Cannot remove...'.format(image.name))
+ print("WARNING: Image {} is protected. Cannot remove...".format(image.name))
continue
try:
result = cloud.delete_image(image.name)
except shade.exc.OpenStackCloudException as e:
- if str(e).startswith('Multiple matches found for'):
- print('WARNING: {}. Skipping image...'.format(str(e)))
+ if str(e).startswith("Multiple matches found for"):
+ print("WARNING: {}. Skipping image...".format(str(e)))
continue
else:
- print('ERROR: Unexpected exception: {}'.format(str(e)))
+ print("ERROR: Unexpected exception: {}".format(str(e)))
raise
if not result:
- print('WARNING: Failed to remove \"{}\" from {}. Possibly already deleted.'
- .format(image.name, cloud.cloud_config.name))
+ print(
+ 'WARNING: Failed to remove "{}" from {}. Possibly already deleted.'.format(
+ image.name, cloud.cloud_config.name
+ )
+ )
else:
print('Removed "{}" from {}.'.format(image.name, cloud.cloud_config.name))
def share(os_cloud, image, clouds):
"""Share image with another tenant."""
+
def _get_image_id(os_cloud, image):
- cmd = ['openstack', '--os-cloud', os_cloud, 'image', 'list',
- '--name', image, '-f', 'value', '-c', 'ID']
+ cmd = ["openstack", "--os-cloud", os_cloud, "image", "list", "--name", image, "-f", "value", "-c", "ID"]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
- log.debug('exit code: {}'.format(p.returncode))
- log.debug(stderr.decode('utf-8'))
+ log.debug("exit code: {}".format(p.returncode))
+ log.debug(stderr.decode("utf-8"))
if p.returncode:
sys.exit(1)
- image_id = stdout.decode('utf-8').strip()
- log.debug('image_id: {}'.format(image_id))
+ image_id = stdout.decode("utf-8").strip()
+ log.debug("image_id: {}".format(image_id))
return image_id
def _mark_image_shared(os_cloud, image):
- cmd = ['openstack', '--os-cloud', os_cloud, 'image', 'set', '--shared', image]
+ cmd = ["openstack", "--os-cloud", os_cloud, "image", "set", "--shared", image]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
- log.debug('exit code: {}'.format(p.returncode))
- log.debug(stderr.decode('utf-8'))
+ log.debug("exit code: {}".format(p.returncode))
+ log.debug(stderr.decode("utf-8"))
if p.returncode:
sys.exit(1)
def _get_token(cloud):
- cmd = ['openstack', '--os-cloud', cloud, 'token', 'issue',
- '-c', 'project_id', '-f', 'value']
+ cmd = ["openstack", "--os-cloud", cloud, "token", "issue", "-c", "project_id", "-f", "value"]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
- log.debug('exit code: {}'.format(p.returncode))
- log.debug(stderr.decode('utf-8'))
+ log.debug("exit code: {}".format(p.returncode))
+ log.debug(stderr.decode("utf-8"))
if p.returncode:
sys.exit(1)
- token = stdout.decode('utf-8').strip()
- log.debug('token: {}'.format(token))
+ token = stdout.decode("utf-8").strip()
+ log.debug("token: {}".format(token))
return token
def _share_to_cloud(os_cloud, image, token):
- log.debug('Sharing image {} to {}'.format(image, token))
- cmd = ['openstack', '--os-cloud', os_cloud, 'image', 'add', 'project',
- image, token]
+ log.debug("Sharing image {} to {}".format(image, token))
+ cmd = ["openstack", "--os-cloud", os_cloud, "image", "add", "project", image, token]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
- log.debug('exit code: {}'.format(p.returncode))
- log.debug(stderr.decode('utf-8'))
+ log.debug("exit code: {}".format(p.returncode))
+ log.debug(stderr.decode("utf-8"))
if p.returncode:
- if stderr.decode('utf-8').startswith('409 Conflict'):
- log.info(' Image is already shared.')
+ if stderr.decode("utf-8").startswith("409 Conflict"):
+ log.info(" Image is already shared.")
else:
sys.exit(1)
def _accept_shared_image(cloud, image):
- log.debug('Accepting image {}'.format(image))
- cmd = ['openstack', '--os-cloud', cloud, 'image', 'set',
- '--accept', image]
+ log.debug("Accepting image {}".format(image))
+ cmd = ["openstack", "--os-cloud", cloud, "image", "set", "--accept", image]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
- log.debug('exit code: {}'.format(p.returncode))
- log.debug(stderr.decode('utf-8'))
+ log.debug("exit code: {}".format(p.returncode))
+ log.debug(stderr.decode("utf-8"))
if p.returncode:
sys.exit(1)
_mark_image_shared(os_cloud, image_id)
for cloud in clouds:
- log.info('Sharing to {}.'.format(cloud))
+ log.info("Sharing to {}.".format(cloud))
_share_to_cloud(os_cloud, image_id, _get_token(cloud))
_accept_shared_image(cloud, image_id)
-def upload(os_cloud, image, name, disk_format='qcow2'):
+def upload(os_cloud, image, name, disk_format="qcow2"):
"""Upload image to openstack."""
log.info('Uploading image {} with name "{}".'.format(image, name))
cloud = shade.openstack_cloud(cloud=os_cloud)
- if re.match(r'^http[s]?://', image):
- tmp = tempfile.NamedTemporaryFile(suffix='.img')
- log.info('URL provided downloading image locally to {}.'.format(tmp.name))
+ if re.match(r"^http[s]?://", image):
+ tmp = tempfile.NamedTemporaryFile(suffix=".img")
+        log.info("URL provided, downloading image locally to {}.".format(tmp.name))
urllib.request.urlretrieve(image, tmp.name) # nosec
image = tmp.name
log.info(str(e))
sys.exit(1)
- log.info('Upload complete.')
+ log.info("Upload complete.")
##############################################################################
"""CLI configuration for openstack command."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
import click
##############################################################################
"""Object related sub-commands for openstack command."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
import shade
##############################################################################
"""Server related sub-commands for openstack command."""
-__author__ = 'Anil Belur'
+__author__ = "Anil Belur"
from datetime import datetime
from datetime import timedelta
"""Filter server data and return list."""
filtered = []
for server in servers:
- if days and (
- datetime.strptime(server.created, '%Y-%m-%dT%H:%M:%SZ')
- >= datetime.now() - timedelta(days=days)):
+ if days and (datetime.strptime(server.created, "%Y-%m-%dT%H:%M:%SZ") >= datetime.now() - timedelta(days=days)):
continue
filtered.append(server)
:arg str os_cloud: Cloud name as defined in OpenStack clouds.yaml.
:arg int days: Filter servers that are older than number of days.
"""
+
def _remove_servers_from_cloud(servers, cloud):
- print('Removing {} servers from {}.'.format(len(servers), cloud.cloud_config.name))
+ print("Removing {} servers from {}.".format(len(servers), cloud.cloud_config.name))
for server in servers:
try:
result = cloud.delete_server(server.name)
except shade.exc.OpenStackCloudException as e:
- if str(e).startswith('Multiple matches found for'):
- print('WARNING: {}. Skipping server...'.format(str(e)))
+ if str(e).startswith("Multiple matches found for"):
+ print("WARNING: {}. Skipping server...".format(str(e)))
continue
else:
- print('ERROR: Unexpected exception: {}'.format(str(e)))
+ print("ERROR: Unexpected exception: {}".format(str(e)))
raise
if not result:
- print('WARNING: Failed to remove \"{}\" from {}. Possibly already deleted.'
- .format(server.name, cloud.cloud_config.name))
+ print(
+ 'WARNING: Failed to remove "{}" from {}. Possibly already deleted.'.format(
+ server.name, cloud.cloud_config.name
+ )
+ )
else:
print('Removed "{}" from {}.'.format(server.name, cloud.cloud_config.name))
print("ERROR: Server not found.")
sys.exit(1)
- if (datetime.strptime(server.created, '%Y-%m-%dT%H:%M:%SZ')
- >= datetime.utcnow() - timedelta(minutes=minutes)):
- print('WARN: Server "{}" is not older than {} minutes.'.format(
- server.name, minutes))
+ if datetime.strptime(server.created, "%Y-%m-%dT%H:%M:%SZ") >= datetime.utcnow() - timedelta(minutes=minutes):
+ print('WARN: Server "{}" is not older than {} minutes.'.format(server.name, minutes))
else:
cloud.delete_server(server.name)
##############################################################################
"""stack related sub-commands for openstack command."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
from datetime import datetime
import json
cloud = shade.openstack_cloud(cloud=os_cloud)
stack_success = False
- print('Creating stack {}'.format(name))
+ print("Creating stack {}".format(name))
for i in range(tries):
try:
stack = cloud.create_stack(
- name,
- template_file=template_file,
- environment_files=[parameter_file],
- timeout=timeout,
- rollback=False)
+ name, template_file=template_file, environment_files=[parameter_file], timeout=timeout, rollback=False
+ )
except shade.exc.OpenStackCloudHTTPError as e:
if cloud.search_stacks(name):
- print('Stack with name {} already exists.'.format(name))
+ print("Stack with name {} already exists.".format(name))
else:
print(e)
sys.exit(1)
time.sleep(10)
stack = cloud.get_stack(stack_id)
- if stack.stack_status == 'CREATE_IN_PROGRESS':
- print('Waiting to initialize infrastructure...')
- elif stack.stack_status == 'CREATE_COMPLETE':
- print('Stack initialization successful.')
+ if stack.stack_status == "CREATE_IN_PROGRESS":
+ print("Waiting to initialize infrastructure...")
+ elif stack.stack_status == "CREATE_COMPLETE":
+ print("Stack initialization successful.")
stack_success = True
break
- elif stack.stack_status == 'CREATE_FAILED':
- print('WARN: Failed to initialize stack. Reason: {}'.format(
- stack.stack_status_reason))
+ elif stack.stack_status == "CREATE_FAILED":
+ print("WARN: Failed to initialize stack. Reason: {}".format(stack.stack_status_reason))
if delete(os_cloud, stack_id):
break
else:
- print('Unexpected status: {}'.format(stack.stack_status))
+ print("Unexpected status: {}".format(stack.stack_status))
if stack_success:
break
- print('------------------------------------')
- print('Stack Details')
- print('------------------------------------')
+ print("------------------------------------")
+ print("Stack Details")
+ print("------------------------------------")
cloud.pprint(stack)
- print('------------------------------------')
+ print("------------------------------------")
def cost(os_cloud, stack_name):
Return the cost in dollars & cents (x.xx).
"""
+
def get_server_cost(server_id):
flavor, seconds = get_server_info(server_id)
url = "https://pricing.vexxhost.net/v1/pricing/%s/cost?seconds=%d"
with urllib.request.urlopen(url % (flavor, seconds)) as response: # nosec
data = json.loads(response.read())
- return data['cost']
+ return data["cost"]
def parse_iso8601_time(time):
return datetime.strptime(time, "%Y-%m-%dT%H:%M:%S.%f")
def get_server_info(server_id):
server = cloud.compute.find_server(server_id)
- diff = (datetime.utcnow() - parse_iso8601_time(server.launched_at))
- return server.flavor['original_name'], diff.total_seconds()
+ diff = datetime.utcnow() - parse_iso8601_time(server.launched_at)
+ return server.flavor["original_name"], diff.total_seconds()
def get_server_ids(stack_name):
- servers = get_resources_by_type(stack_name, 'OS::Nova::Server')
- return [s['physical_resource_id'] for s in servers]
+ servers = get_resources_by_type(stack_name, "OS::Nova::Server")
+ return [s["physical_resource_id"] for s in servers]
def get_resources_by_type(stack_name, resource_type):
resources = get_stack_resources(stack_name)
resources = []
def _is_nested(resource):
- link_types = [l['rel'] for l in resource.links]
- if 'nested' in link_types:
+ link_types = [l["rel"] for l in resource.links]
+ if "nested" in link_types:
return True
return False
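
Putting the helpers together: the stack cost is the sum over its
OS::Nova::Server resources of the pricing endpoint's answer for
(flavor, uptime seconds). A standalone sketch of one lookup, hitting the same
vexxhost endpoint as above (flavor and uptime are invented):

import json
import urllib.request

def server_cost(flavor, seconds):
    url = "https://pricing.vexxhost.net/v1/pricing/%s/cost?seconds=%d"
    with urllib.request.urlopen(url % (flavor, seconds)) as response:  # nosec
        return json.loads(response.read())["cost"]

# total = sum(server_cost(f, s) for f, s in [("v2-standard-4", 3600)])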
Return True if delete was successful.
"""
cloud = shade.openstack_cloud(cloud=os_cloud)
- print('Deleting stack {}'.format(name_or_id))
+ print("Deleting stack {}".format(name_or_id))
cloud.delete_stack(name_or_id)
t_end = time.time() + timeout
time.sleep(10)
stack = cloud.get_stack(name_or_id)
- if not stack or stack.stack_status == 'DELETE_COMPLETE':
- print('Successfully deleted stack {}'.format(name_or_id))
+ if not stack or stack.stack_status == "DELETE_COMPLETE":
+ print("Successfully deleted stack {}".format(name_or_id))
return True
- elif stack.stack_status == 'DELETE_IN_PROGRESS':
- print('Waiting for stack to delete...')
- elif stack.stack_status == 'DELETE_FAILED':
- print('WARN: Failed to delete $STACK_NAME. Reason: {}'.format(
- stack.stack_status_reason))
- print('Retrying delete...')
+ elif stack.stack_status == "DELETE_IN_PROGRESS":
+ print("Waiting for stack to delete...")
+ elif stack.stack_status == "DELETE_FAILED":
+ print("WARN: Failed to delete $STACK_NAME. Reason: {}".format(stack.stack_status_reason))
+ print("Retrying delete...")
cloud.delete_stack(name_or_id)
else:
- print('WARN: Unexpected delete status: {}'.format(
- stack.stack_status))
- print('Retrying delete...')
+ print("WARN: Unexpected delete status: {}".format(stack.stack_status))
+ print("Retrying delete...")
cloud.delete_stack(name_or_id)
- print('Failed to delete stack {}'.format(name_or_id))
+ print("Failed to delete stack {}".format(name_or_id))
if not force:
return False
cloud = shade.openstack_cloud(cloud=os_cloud)
stacks = cloud.search_stacks()
if not stacks:
- log.debug('No stacks to delete.')
+ log.debug("No stacks to delete.")
sys.exit(0)
builds = []
for server in jenkins_servers:
jenkins = Jenkins(server)
- jenkins_url = jenkins.url.rstrip('/')
- silo = jenkins_url.split('/')
+ jenkins_url = jenkins.url.rstrip("/")
+ silo = jenkins_url.split("/")
if len(silo) == 4: # https://jenkins.opendaylight.org/releng
silo = silo[3]
elif len(silo) == 3: # https://jenkins.onap.org
- silo = 'production'
+ silo = "production"
else:
- log.error('Unexpected URL pattern, could not detect silo.')
+ log.error("Unexpected URL pattern, could not detect silo.")
sys.exit(1)
- log.debug('Fetching running builds from {}'.format(jenkins_url))
+ log.debug("Fetching running builds from {}".format(jenkins_url))
running_builds = jenkins.server.get_running_builds()
for build in running_builds:
- build_name = '{}-{}-{}'.format(
- silo, build.get('name'), build.get('number'))
- log.debug(' {}'.format(build_name))
+ build_name = "{}-{}-{}".format(silo, build.get("name"), build.get("number"))
+ log.debug(" {}".format(build_name))
builds.append(build_name)
- log.debug('Active stacks')
+ log.debug("Active stacks")
for stack in stacks:
- if (stack.stack_status == 'CREATE_COMPLETE' or
- stack.stack_status == 'CREATE_FAILED' or
- stack.stack_status == 'DELETE_FAILED'):
- log.debug(' {}'.format(stack.stack_name))
-
- if stack.stack_status == 'DELETE_FAILED':
+ if (
+ stack.stack_status == "CREATE_COMPLETE"
+ or stack.stack_status == "CREATE_FAILED"
+ or stack.stack_status == "DELETE_FAILED"
+ ):
+ log.debug(" {}".format(stack.stack_name))
+
+ if stack.stack_status == "DELETE_FAILED":
cloud.pprint(stack)
if stack.stack_name not in builds:
- log.debug(' >>>> Marked for deletion <<<<')
+ log.debug(" >>>> Marked for deletion <<<<")
delete(os_cloud, stack.stack_name)
else:
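
The silo prefix is what ties a stack back to the Jenkins instance that created
it: running builds are named {silo}-{job}-{number}, and a stack whose name
matches no running build is stale. The URL-to-silo logic as a standalone
function (function name and URLs illustrative, mirroring the branches above):

def detect_silo(jenkins_url):
    parts = jenkins_url.rstrip("/").split("/")
    if len(parts) == 4:  # e.g. https://jenkins.opendaylight.org/releng
        return parts[3]
    if len(parts) == 3:  # e.g. https://jenkins.onap.org
        return "production"
    raise ValueError("Unexpected URL pattern, could not detect silo.")

print(detect_silo("https://jenkins.opendaylight.org/releng"))  # releng
print(detect_silo("https://jenkins.onap.org"))                 # production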
##############################################################################
"""volume related sub-commands for openstack command."""
-__author__ = 'Thanh Ha'
+__author__ = "Thanh Ha"
from datetime import datetime
from datetime import timedelta
filtered = []
for volume in volumes:
if days and (
- datetime.strptime(volume.created_at, '%Y-%m-%dT%H:%M:%S.%f')
- >= datetime.now() - timedelta(days=days)):
+ datetime.strptime(volume.created_at, "%Y-%m-%dT%H:%M:%S.%f") >= datetime.now() - timedelta(days=days)
+ ):
continue
filtered.append(volume)
:arg str os_cloud: Cloud name as defined in OpenStack clouds.yaml.
:arg int days: Filter volumes that are older than number of days.
"""
+
def _remove_volumes_from_cloud(volumes, cloud):
- print('Removing {} volumes from {}.'.format(len(volumes), cloud.cloud_config.name))
+ print("Removing {} volumes from {}.".format(len(volumes), cloud.cloud_config.name))
for volume in volumes:
try:
result = cloud.delete_volume(volume.name)
except shade.exc.OpenStackCloudException as e:
- if str(e).startswith('Multiple matches found for'):
- print('WARNING: {}. Skipping volume...'.format(str(e)))
+ if str(e).startswith("Multiple matches found for"):
+ print("WARNING: {}. Skipping volume...".format(str(e)))
continue
else:
- print('ERROR: Unexpected exception: {}'.format(str(e)))
+ print("ERROR: Unexpected exception: {}".format(str(e)))
raise
if not result:
- print('WARNING: Failed to remove \"{}\" from {}. Possibly already deleted.'
- .format(volume.name, cloud.cloud_config.name))
+ print(
+ 'WARNING: Failed to remove "{}" from {}. Possibly already deleted.'.format(
+ volume.name, cloud.cloud_config.name
+ )
+ )
else:
print('Removed "{}" from {}.'.format(volume.name, cloud.cloud_config.name))
print("ERROR: volume not found.")
sys.exit(1)
- if (datetime.strptime(volume.created_at, '%Y-%m-%dT%H:%M:%S.%f')
- >= datetime.utcnow() - timedelta(minutes=minutes)):
- print('WARN: volume "{}" is not older than {} minutes.'.format(
- volume.name, minutes))
+ if datetime.strptime(volume.created_at, "%Y-%m-%dT%H:%M:%S.%f") >= datetime.utcnow() - timedelta(minutes=minutes):
+ print('WARN: volume "{}" is not older than {} minutes.'.format(volume.name, minutes))
else:
cloud.delete_volume(volume.id)
schema_file = yaml.safe_load(_)
# Load the schema
- validation = jsonschema.Draft4Validator(
- schema_file,
- format_checker=jsonschema.FormatChecker()
- )
+ validation = jsonschema.Draft4Validator(schema_file, format_checker=jsonschema.FormatChecker())
validation.iter_errors(yaml_file)
# Look for errors
##############################################################################
"""Functions for DCO check tasks."""
-__author__ = 'DW Talton'
+__author__ = "DW Talton"
import logging
from os import chdir
def get_branches(path=getcwd(), invert=False):
"""Get a list of branches."""
if invert:
- invert = '--invert-grep'
+ invert = "--invert-grep"
else:
- invert = ''
+ invert = ""
chdir(path)
try:
- branches = subprocess.check_output( # nosec
- "git branch -r | grep -v origin/HEAD", shell=True)\
- .decode(encoding="UTF-8") \
+ branches = (
+ subprocess.check_output("git branch -r | grep -v origin/HEAD", shell=True) # nosec
+ .decode(encoding="UTF-8")
.splitlines()
+ )
hashlist = []
for branch in branches:
branch = branch.strip()
- hashes = subprocess.check_output( # nosec
- 'git log {} --no-merges --pretty="%H %ae" --grep "Signed-off-by" {}' # noqa
- .format(branch, invert), shell=True)\
- .decode(encoding="UTF-8")\
- .split('\n')
+ hashes = (
+ subprocess.check_output( # nosec
+ 'git log {} --no-merges --pretty="%H %ae" --grep "Signed-off-by" {}'.format(branch, invert), # noqa
+ shell=True,
+ )
+ .decode(encoding="UTF-8")
+ .split("\n")
+ )
hashlist = hashlist + hashes
if hashlist:
# remove a trailing blank list entry
missing = []
for commit in hashes:
if commit:
- missing.append(commit.split(' ')[0])
+ missing.append(commit.split(" ")[0])
if missing:
# de-dupe the commit list
exit(exit_code)
else:
for commit in hashes:
- commit_id = commit.split(' ')[0]
+ commit_id = commit.split(" ")[0]
if commit_id:
commit_log_message = subprocess.check_output( # nosec
- "git log --format=%B -n 1 {}"
- .format(commit_id), shell=True)\
+ "git log --format=%B -n 1 {}".format(commit_id), shell=True
+ ).decode(encoding="UTF-8")
+ commit_author_email = (
+ subprocess.check_output("git log --format='%ae' {}^!".format(commit_id), shell=True) # nosec
.decode(encoding="UTF-8")
- commit_author_email = subprocess.check_output( # nosec
- "git log --format='%ae' {}^!"
- .format(commit_id), shell=True)\
- .decode(encoding="UTF-8").strip()
- sob_email_regex = '(?=Signed\-off\-by: )*[\<](.*)[\>]' # noqa
- sob_results = re.findall(sob_email_regex,
- commit_log_message)
+ .strip()
+ )
+ sob_email_regex = "(?=Signed\-off\-by: )*[\<](.*)[\>]" # noqa
+ sob_results = re.findall(sob_email_regex, commit_log_message)
if commit_author_email in sob_results:
continue
else:
- log.info("For commit ID {}: \n\tCommitter is {}"
- "\n\tbut commit is signed off by {}\n"
- .format(commit_id,
- commit_author_email,
- sob_results))
+ log.info(
+ "For commit ID {}: \n\tCommitter is {}"
+ "\n\tbut commit is signed off by {}\n".format(commit_id, commit_author_email, sob_results)
+ )
exit_code = 1
exit(exit_code)
except subprocess.CalledProcessError as e:
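
The sign-off check compares the commit author's email with every address
captured from Signed-off-by lines. Note the repeated lookahead makes the
Signed-off-by prefix effectively optional, so any angle-bracketed address in
the body is captured. The capture in isolation (commit text and address are
invented; a raw string avoids the invalid-escape warnings the source silences
with noqa):

import re

commit_log_message = """Fix the frobnicator

Signed-off-by: Jane Developer <jane@example.org>
"""
sob_email_regex = r"(?=Signed\-off\-by: )*[\<](.*)[\>]"
print(re.findall(sob_email_regex, commit_log_message))  # ['jane@example.org']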
from setuptools import setup
-with open('requirements.txt') as f:
+with open("requirements.txt") as f:
install_reqs = f.read().splitlines()
-with open('requirements-test.txt') as f:
+with open("requirements-test.txt") as f:
f.readline() # Skip the first -rrequirements.txt line
test_reqs = f.read().splitlines()
setup(
- setup_requires=['pbr', 'pytest-runner'],
+ setup_requires=["pbr", "pytest-runner"],
pbr=True,
install_requires=install_reqs,
- packages=find_packages(exclude=[
- '*.tests',
- '*.tests.*',
- 'tests.*',
- 'tests'
- ]),
+ packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
tests_require=test_reqs,
)
import lftools.api.client as client
-creds = {
- 'authtype': 'token',
- 'endpoint': '',
- 'token': 'xyz'
-}
+creds = {"authtype": "token", "endpoint": "", "token": "xyz"}
c = client.RestApi(creds=creds)
@responses.activate
def test_get():
- responses.add(responses.GET, 'https://fakeurl/', json={'success': 'get'},
- status=200, match_querystring=True)
- resp = c.get('https://fakeurl/')
- assert resp[1] == {'success': 'get'}
+ responses.add(responses.GET, "https://fakeurl/", json={"success": "get"}, status=200, match_querystring=True)
+ resp = c.get("https://fakeurl/")
+ assert resp[1] == {"success": "get"}
@responses.activate
def test_patch():
- responses.add(responses.PATCH, url='https://fakeurl/',
- json={'success': 'patch'}, status=204,
- match_querystring=True)
- resp = c.patch('https://fakeurl/')
- assert resp[1] == {'success': 'patch'}
+ responses.add(
+ responses.PATCH, url="https://fakeurl/", json={"success": "patch"}, status=204, match_querystring=True
+ )
+ resp = c.patch("https://fakeurl/")
+ assert resp[1] == {"success": "patch"}
@responses.activate
def test_post():
- responses.add(responses.POST, 'https://fakeurl/', json={'success': 'post'},
- status=201, match_querystring=True)
- resp = c.post('https://fakeurl/')
- assert resp[1] == {'success': 'post'}
+ responses.add(responses.POST, "https://fakeurl/", json={"success": "post"}, status=201, match_querystring=True)
+ resp = c.post("https://fakeurl/")
+ assert resp[1] == {"success": "post"}
@responses.activate
def test_put():
- responses.add(responses.PUT, 'https://fakeurl/', json={'success': 'put'},
- status=200, match_querystring=True)
- resp = c.put('https://fakeurl/')
- assert resp[1] == {'success': 'put'}
+ responses.add(responses.PUT, "https://fakeurl/", json={"success": "put"}, status=200, match_querystring=True)
+ resp = c.put("https://fakeurl/")
+ assert resp[1] == {"success": "put"}
@responses.activate
def test_delete():
- responses.add(responses.DELETE, 'https://fakeurl/',
- json={'success': 'delete'}, status=200,
- match_querystring=True)
- resp = c.delete('https://fakeurl/')
- assert resp[1] == {'success': 'delete'}
+ responses.add(responses.DELETE, "https://fakeurl/", json={"success": "delete"}, status=200, match_querystring=True)
+ resp = c.delete("https://fakeurl/")
+ assert resp[1] == {"success": "delete"}
import lftools.deploy as deploy_sys
-FIXTURE_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'fixtures',
- )
+FIXTURE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures",)
def test_log_and_exit():
def test_format_url():
"""Test url format."""
- test_url=[["192.168.1.1", "http://192.168.1.1"],
- ["192.168.1.1:8081", "http://192.168.1.1:8081"],
- ["192.168.1.1:8081/nexus", "http://192.168.1.1:8081/nexus"],
- ["192.168.1.1:8081/nexus/", "http://192.168.1.1:8081/nexus"],
- ["http://192.168.1.1:8081/nexus", "http://192.168.1.1:8081/nexus"],
- ["https://192.168.1.1:8081/nexus", "https://192.168.1.1:8081/nexus"],
- ["https://192.168.1.1:8081/nexus/", "https://192.168.1.1:8081/nexus"],
- ["www.goodnexussite.org:8081", "http://www.goodnexussite.org:8081"],
- ["192.168.1.1:8081/nexus///", "http://192.168.1.1:8081/nexus"]]
+ test_url = [
+ ["192.168.1.1", "http://192.168.1.1"],
+ ["192.168.1.1:8081", "http://192.168.1.1:8081"],
+ ["192.168.1.1:8081/nexus", "http://192.168.1.1:8081/nexus"],
+ ["192.168.1.1:8081/nexus/", "http://192.168.1.1:8081/nexus"],
+ ["http://192.168.1.1:8081/nexus", "http://192.168.1.1:8081/nexus"],
+ ["https://192.168.1.1:8081/nexus", "https://192.168.1.1:8081/nexus"],
+ ["https://192.168.1.1:8081/nexus/", "https://192.168.1.1:8081/nexus"],
+ ["www.goodnexussite.org:8081", "http://www.goodnexussite.org:8081"],
+ ["192.168.1.1:8081/nexus///", "http://192.168.1.1:8081/nexus"],
+ ]
for url in test_url:
assert deploy_sys._format_url(url[0]) == url[1]
assert excinfo.type == SystemExit
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'deploy'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
def test_copy_archive_dir(cli_runner, datafiles):
"""Test copy_archives() command to ensure archives dir is copied."""
os.chdir(str(datafiles))
- workspace_dir = os.path.join(str(datafiles), 'workspace')
+ workspace_dir = os.path.join(str(datafiles), "workspace")
stage_dir = str(datafiles.mkdir("stage_archive"))
os.chdir(stage_dir)
- result = cli_runner.invoke(
- cli.cli,
- ['--debug', 'deploy', 'copy-archives', workspace_dir],
- obj={})
+ result = cli_runner.invoke(cli.cli, ["--debug", "deploy", "copy-archives", workspace_dir], obj={})
assert result.exit_code == 0
- assert os.path.exists(os.path.join(stage_dir, 'test.log'))
+ assert os.path.exists(os.path.join(stage_dir, "test.log"))
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'deploy'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
def test_copy_archive_pattern(cli_runner, datafiles):
"""Test copy_archives() command to ensure glob patterns are copied."""
os.chdir(str(datafiles))
- workspace_dir = os.path.join(str(datafiles), 'workspace')
+ workspace_dir = os.path.join(str(datafiles), "workspace")
stage_dir = str(datafiles.mkdir("stage_archive"))
os.chdir(stage_dir)
- result = cli_runner.invoke(
- cli.cli,
- ['--debug', 'deploy', 'copy-archives', workspace_dir, '**/*.txt'],
- obj={})
+ result = cli_runner.invoke(cli.cli, ["--debug", "deploy", "copy-archives", workspace_dir, "**/*.txt"], obj={})
assert result.exit_code == 0
- assert os.path.exists(os.path.join(stage_dir, 'test.log'))
- assert os.path.exists(os.path.join(stage_dir, 'abc.txt'))
- assert not os.path.exists(os.path.join(stage_dir, 'dependencies.log'))
- assert os.path.exists(os.path.join(
- stage_dir, 'aaa', 'aaa-cert', 'target', 'surefire-reports',
- 'org.opendaylight.aaa.cert.test.AaaCertMdsalProviderTest-output.txt'))
+ assert os.path.exists(os.path.join(stage_dir, "test.log"))
+ assert os.path.exists(os.path.join(stage_dir, "abc.txt"))
+ assert not os.path.exists(os.path.join(stage_dir, "dependencies.log"))
+ assert os.path.exists(
+ os.path.join(
+ stage_dir,
+ "aaa",
+ "aaa-cert",
+ "target",
+ "surefire-reports",
+ "org.opendaylight.aaa.cert.test.AaaCertMdsalProviderTest-output.txt",
+ )
+ )
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'deploy'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
def test_deploy_archive(cli_runner, datafiles, responses):
"""Test deploy_archives() command for expected upload cases."""
os.chdir(str(datafiles))
- workspace_dir = os.path.join(str(datafiles), 'workspace')
+ workspace_dir = os.path.join(str(datafiles), "workspace")
# Test successful upload
- url = 'https://nexus.example.org/service/local/repositories/logs/content-compressed'
- responses.add(responses.POST, '{}/test/path/abc'.format(url),
- json=None, status=201)
+ url = "https://nexus.example.org/service/local/repositories/logs/content-compressed"
+ responses.add(responses.POST, "{}/test/path/abc".format(url), json=None, status=201)
result = cli_runner.invoke(
- cli.cli,
- ['--debug', 'deploy', 'archives', 'https://nexus.example.org', 'test/path/abc', workspace_dir],
- obj={})
+ cli.cli, ["--debug", "deploy", "archives", "https://nexus.example.org", "test/path/abc", workspace_dir], obj={}
+ )
assert result.exit_code == 0
# Test failed upload
- url = 'https://nexus-fail.example.org/service/local/repositories/logs/content-compressed'
- responses.add(responses.POST, '{}/test/fail/path'.format(url),
- status=404)
+ url = "https://nexus-fail.example.org/service/local/repositories/logs/content-compressed"
+ responses.add(responses.POST, "{}/test/fail/path".format(url), status=404)
result = cli_runner.invoke(
cli.cli,
- ['--debug', 'deploy', 'archives', 'https://nexus-fail.example.org', 'test/fail/path', workspace_dir],
- obj={})
+ ["--debug", "deploy", "archives", "https://nexus-fail.example.org", "test/fail/path", workspace_dir],
+ obj={},
+ )
assert result.exit_code == 1
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'deploy'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
def test_deploy_archive2(datafiles):
"""Test deploy_archives() command when archives dir is missing."""
os.chdir(str(datafiles))
- workspace_dir = os.path.join(str(datafiles), 'workspace-noarchives')
+ workspace_dir = os.path.join(str(datafiles), "workspace-noarchives")
with pytest.raises(OSError) as excinfo:
- deploy_sys.copy_archives(workspace_dir)
+ deploy_sys.copy_archives(workspace_dir)
assert workspace_dir in str(excinfo.value)
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'deploy'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
def test_deploy_archive3(datafiles):
"""Test deploy_archives() command when archives dir is a file instead of a dir."""
os.chdir(str(datafiles))
- workspace_dir = os.path.join(str(datafiles), 'workspace-archivesfile')
+ workspace_dir = os.path.join(str(datafiles), "workspace-archivesfile")
with pytest.raises(OSError) as excinfo:
- deploy_sys.copy_archives(workspace_dir)
+ deploy_sys.copy_archives(workspace_dir)
assert workspace_dir in str(excinfo.value)
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'deploy'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
def test_deploy_archive4(cli_runner, datafiles, responses):
"""Test deploy_archives() command when using duplicated patterns."""
os.chdir(str(datafiles))
- workspace_dir = os.path.join(str(datafiles), 'workspace-patternfile')
- pattern=["**/*.log", "**/hs_err_*.log", "**/target/**/feature.xml", "**/target/failsafe-reports/failsafe-summary.xml", "**/target/surefire-reports/*-output.txt", "**/target/surefire-reports/*-output.txt", "**/target/failsafe-reports/failsafe-summary.xml", "**/*"]
+ workspace_dir = os.path.join(str(datafiles), "workspace-patternfile")
+ pattern = [
+ "**/*.log",
+ "**/hs_err_*.log",
+ "**/target/**/feature.xml",
+ "**/target/failsafe-reports/failsafe-summary.xml",
+ "**/target/surefire-reports/*-output.txt",
+ "**/target/surefire-reports/*-output.txt",
+ "**/target/failsafe-reports/failsafe-summary.xml",
+ "**/*",
+ ]
result = deploy_sys.copy_archives(workspace_dir, pattern)
assert result is None
def test_remove_duplicates_and_sort():
- test_lst = [[["file1"],
- ["file1"]],
-
- [["file1", "file2"],
- ["file1", "file2"]],
-
- [["file2", "file3", "file5", "file1", "file4"],
- ["file1", "file2", "file3", "file4", "file5"]],
-
- [["file2", "file3", "file2", "file3", "file4"],
- ["file2", "file3", "file4"]],
-
-
- [["**/*.log",
+ test_lst = [
+ [["file1"], ["file1"]],
+ [["file1", "file2"], ["file1", "file2"]],
+ [["file2", "file3", "file5", "file1", "file4"], ["file1", "file2", "file3", "file4", "file5"]],
+ [["file2", "file3", "file2", "file3", "file4"], ["file2", "file3", "file4"]],
+ [
+ [
+ "**/*.log",
"**/hs_err_*.log",
"**/target/**/feature.xml",
"**/target/failsafe-reports/failsafe-summary.xml",
"**/target/surefire-reports/*-output.txt",
"**/target/surefire-reports/*-output.txt",
- "**/target/failsafe-reports/failsafe-summary.xml"],
-
- ["**/*.log",
+ "**/target/failsafe-reports/failsafe-summary.xml",
+ ],
+ [
+ "**/*.log",
"**/hs_err_*.log",
"**/target/**/feature.xml",
"**/target/failsafe-reports/failsafe-summary.xml",
- "**/target/surefire-reports/*-output.txt"]],
-
- [['/workspace-patternfile/abc.log',
- '/workspace-patternfile/dir1/hs_err_13.log',
- '/workspace-patternfile/dir1/hs_err_12.log',
- '/workspace-patternfile/dir1/abc.log',
- '/workspace-patternfile/dir2/hs_err_13.log',
- '/workspace-patternfile/dir2/hs_err_12.log',
- '/workspace-patternfile/dir2/abc.log',
- '/workspace-patternfile/dir1/hs_err_13.log',
- '/workspace-patternfile/dir1/hs_err_12.log',
- '/workspace-patternfile/dir2/hs_err_13.log',
- '/workspace-patternfile/dir2/hs_err_12.log',
- '/workspace-patternfile/target/dir1/feature.xml',
- '/workspace-patternfile/target/dir2/feature.xml',
- '/workspace-patternfile/target/surefire-reports/abc1-output.txt',
- '/workspace-patternfile/target/surefire-reports/abc2-output.txt',
- '/workspace-patternfile/target/surefire-reports/abc1-output.txt',
- '/workspace-patternfile/target/surefire-reports/abc2-output.txt'],
-
- ['/workspace-patternfile/abc.log',
- '/workspace-patternfile/dir1/abc.log',
- '/workspace-patternfile/dir1/hs_err_12.log',
- '/workspace-patternfile/dir1/hs_err_13.log',
- '/workspace-patternfile/dir2/abc.log',
- '/workspace-patternfile/dir2/hs_err_12.log',
- '/workspace-patternfile/dir2/hs_err_13.log',
- '/workspace-patternfile/target/dir1/feature.xml',
- '/workspace-patternfile/target/dir2/feature.xml',
- '/workspace-patternfile/target/surefire-reports/abc1-output.txt',
- '/workspace-patternfile/target/surefire-reports/abc2-output.txt']],
-
- [['work/results/repo/repodata',
- 'work/results/repo/repodata/aef510f1572d6c8dd2d245640911934f51dca895d037dc137c3fe343b26ffe2a-other.sqlite.bz2',
- 'work/results/repo/repodata/8370c06da1e72e3186f5bd1bd7d04fb772883959de7973d9b6964322415f2f4f-other.xml.gz',
- 'work/results/repo/repodata/11299388173a685dda16ffa5e8e5993e8e32d513b1f93e11ae4bf38ac3623ff7-filelists.sqlite.bz2',
- 'work/results/repo/repodata/47b4a63805b1d3101f24281ed4237284c48ebc1d423092c742479438353e9a79-filelists.xml.gz',
- 'work/results/repo/repodata/224b2e07d395b282569c3ed5341f4fdc7ba2df3d9236117358d98e9f88667fdb-primary.sqlite.bz2',
- 'work/results/repo/repodata/ae2ac51511d3d99570bbe380deffd2e88043b93dafea81ece1ebae7b6dbb9f35-primary.xml.gz',
- 'work/results/repo/repodata/repomd.xml',
- 'work/results/src_repo',
- 'work/results/src_repo/product-manifest-95-1.el7.centos.ta.src.rpm',
- 'work/results/src_repo/repodata',
- 'work/results/src_repo/repodata/103971d7e000d6d79bfdce8a6ee2acb9d9f9ea70db181d6399ebe7fc1df60cbb-other.sqlite.bz2',
- 'work/results/src_repo/repodata/2fadeaa73aa6313afb359828628bc661c4fc686d82a0e2acba6d93bdd3bd32b8-other.xml.gz',
- 'work/results/src_repo/repodata/28c69dfda86e6dd2d612e21efad415feff1ef44718a475b58d4e2e345fc22f82-filelists.sqlite.bz2',
- 'work/results/src_repo/repodata/a19a91350de47d15d147c12aebe1aa4682e4733edc14719de09eaee8793c1080-filelists.xml.gz',
- 'work/results/src_repo/repodata/6cc8efe401cb22a8e07934d93ef6214fef91175130b2a8c1286161a7bf504a5a-primary.sqlite.bz2',
- 'work/results/src_repo/repodata/43cc7ddec49d87af8e8b78c6ec2c3c8c9bf57d8a0723e3950266cd0440147af4-primary.xml.gz',
- 'work/results/src_repo/repodata/repomd.xml'],
-
- ['work/results/repo/repodata',
- 'work/results/repo/repodata/11299388173a685dda16ffa5e8e5993e8e32d513b1f93e11ae4bf38ac3623ff7-filelists.sqlite.bz2',
- 'work/results/repo/repodata/224b2e07d395b282569c3ed5341f4fdc7ba2df3d9236117358d98e9f88667fdb-primary.sqlite.bz2',
- 'work/results/repo/repodata/47b4a63805b1d3101f24281ed4237284c48ebc1d423092c742479438353e9a79-filelists.xml.gz',
- 'work/results/repo/repodata/8370c06da1e72e3186f5bd1bd7d04fb772883959de7973d9b6964322415f2f4f-other.xml.gz',
- 'work/results/repo/repodata/ae2ac51511d3d99570bbe380deffd2e88043b93dafea81ece1ebae7b6dbb9f35-primary.xml.gz',
- 'work/results/repo/repodata/aef510f1572d6c8dd2d245640911934f51dca895d037dc137c3fe343b26ffe2a-other.sqlite.bz2',
- 'work/results/repo/repodata/repomd.xml',
- 'work/results/src_repo',
- 'work/results/src_repo/product-manifest-95-1.el7.centos.ta.src.rpm',
- 'work/results/src_repo/repodata',
- 'work/results/src_repo/repodata/103971d7e000d6d79bfdce8a6ee2acb9d9f9ea70db181d6399ebe7fc1df60cbb-other.sqlite.bz2',
- 'work/results/src_repo/repodata/28c69dfda86e6dd2d612e21efad415feff1ef44718a475b58d4e2e345fc22f82-filelists.sqlite.bz2',
- 'work/results/src_repo/repodata/2fadeaa73aa6313afb359828628bc661c4fc686d82a0e2acba6d93bdd3bd32b8-other.xml.gz',
- 'work/results/src_repo/repodata/43cc7ddec49d87af8e8b78c6ec2c3c8c9bf57d8a0723e3950266cd0440147af4-primary.xml.gz',
- 'work/results/src_repo/repodata/6cc8efe401cb22a8e07934d93ef6214fef91175130b2a8c1286161a7bf504a5a-primary.sqlite.bz2',
- 'work/results/src_repo/repodata/a19a91350de47d15d147c12aebe1aa4682e4733edc14719de09eaee8793c1080-filelists.xml.gz',
- 'work/results/src_repo/repodata/repomd.xml']]
-
-
-
- ]
+ "**/target/surefire-reports/*-output.txt",
+ ],
+ ],
+ [
+ [
+ "/workspace-patternfile/abc.log",
+ "/workspace-patternfile/dir1/hs_err_13.log",
+ "/workspace-patternfile/dir1/hs_err_12.log",
+ "/workspace-patternfile/dir1/abc.log",
+ "/workspace-patternfile/dir2/hs_err_13.log",
+ "/workspace-patternfile/dir2/hs_err_12.log",
+ "/workspace-patternfile/dir2/abc.log",
+ "/workspace-patternfile/dir1/hs_err_13.log",
+ "/workspace-patternfile/dir1/hs_err_12.log",
+ "/workspace-patternfile/dir2/hs_err_13.log",
+ "/workspace-patternfile/dir2/hs_err_12.log",
+ "/workspace-patternfile/target/dir1/feature.xml",
+ "/workspace-patternfile/target/dir2/feature.xml",
+ "/workspace-patternfile/target/surefire-reports/abc1-output.txt",
+ "/workspace-patternfile/target/surefire-reports/abc2-output.txt",
+ "/workspace-patternfile/target/surefire-reports/abc1-output.txt",
+ "/workspace-patternfile/target/surefire-reports/abc2-output.txt",
+ ],
+ [
+ "/workspace-patternfile/abc.log",
+ "/workspace-patternfile/dir1/abc.log",
+ "/workspace-patternfile/dir1/hs_err_12.log",
+ "/workspace-patternfile/dir1/hs_err_13.log",
+ "/workspace-patternfile/dir2/abc.log",
+ "/workspace-patternfile/dir2/hs_err_12.log",
+ "/workspace-patternfile/dir2/hs_err_13.log",
+ "/workspace-patternfile/target/dir1/feature.xml",
+ "/workspace-patternfile/target/dir2/feature.xml",
+ "/workspace-patternfile/target/surefire-reports/abc1-output.txt",
+ "/workspace-patternfile/target/surefire-reports/abc2-output.txt",
+ ],
+ ],
+ [
+ [
+ "work/results/repo/repodata",
+ "work/results/repo/repodata/aef510f1572d6c8dd2d245640911934f51dca895d037dc137c3fe343b26ffe2a-other.sqlite.bz2",
+ "work/results/repo/repodata/8370c06da1e72e3186f5bd1bd7d04fb772883959de7973d9b6964322415f2f4f-other.xml.gz",
+ "work/results/repo/repodata/11299388173a685dda16ffa5e8e5993e8e32d513b1f93e11ae4bf38ac3623ff7-filelists.sqlite.bz2",
+ "work/results/repo/repodata/47b4a63805b1d3101f24281ed4237284c48ebc1d423092c742479438353e9a79-filelists.xml.gz",
+ "work/results/repo/repodata/224b2e07d395b282569c3ed5341f4fdc7ba2df3d9236117358d98e9f88667fdb-primary.sqlite.bz2",
+ "work/results/repo/repodata/ae2ac51511d3d99570bbe380deffd2e88043b93dafea81ece1ebae7b6dbb9f35-primary.xml.gz",
+ "work/results/repo/repodata/repomd.xml",
+ "work/results/src_repo",
+ "work/results/src_repo/product-manifest-95-1.el7.centos.ta.src.rpm",
+ "work/results/src_repo/repodata",
+ "work/results/src_repo/repodata/103971d7e000d6d79bfdce8a6ee2acb9d9f9ea70db181d6399ebe7fc1df60cbb-other.sqlite.bz2",
+ "work/results/src_repo/repodata/2fadeaa73aa6313afb359828628bc661c4fc686d82a0e2acba6d93bdd3bd32b8-other.xml.gz",
+ "work/results/src_repo/repodata/28c69dfda86e6dd2d612e21efad415feff1ef44718a475b58d4e2e345fc22f82-filelists.sqlite.bz2",
+ "work/results/src_repo/repodata/a19a91350de47d15d147c12aebe1aa4682e4733edc14719de09eaee8793c1080-filelists.xml.gz",
+ "work/results/src_repo/repodata/6cc8efe401cb22a8e07934d93ef6214fef91175130b2a8c1286161a7bf504a5a-primary.sqlite.bz2",
+ "work/results/src_repo/repodata/43cc7ddec49d87af8e8b78c6ec2c3c8c9bf57d8a0723e3950266cd0440147af4-primary.xml.gz",
+ "work/results/src_repo/repodata/repomd.xml",
+ ],
+ [
+ "work/results/repo/repodata",
+ "work/results/repo/repodata/11299388173a685dda16ffa5e8e5993e8e32d513b1f93e11ae4bf38ac3623ff7-filelists.sqlite.bz2",
+ "work/results/repo/repodata/224b2e07d395b282569c3ed5341f4fdc7ba2df3d9236117358d98e9f88667fdb-primary.sqlite.bz2",
+ "work/results/repo/repodata/47b4a63805b1d3101f24281ed4237284c48ebc1d423092c742479438353e9a79-filelists.xml.gz",
+ "work/results/repo/repodata/8370c06da1e72e3186f5bd1bd7d04fb772883959de7973d9b6964322415f2f4f-other.xml.gz",
+ "work/results/repo/repodata/ae2ac51511d3d99570bbe380deffd2e88043b93dafea81ece1ebae7b6dbb9f35-primary.xml.gz",
+ "work/results/repo/repodata/aef510f1572d6c8dd2d245640911934f51dca895d037dc137c3fe343b26ffe2a-other.sqlite.bz2",
+ "work/results/repo/repodata/repomd.xml",
+ "work/results/src_repo",
+ "work/results/src_repo/product-manifest-95-1.el7.centos.ta.src.rpm",
+ "work/results/src_repo/repodata",
+ "work/results/src_repo/repodata/103971d7e000d6d79bfdce8a6ee2acb9d9f9ea70db181d6399ebe7fc1df60cbb-other.sqlite.bz2",
+ "work/results/src_repo/repodata/28c69dfda86e6dd2d612e21efad415feff1ef44718a475b58d4e2e345fc22f82-filelists.sqlite.bz2",
+ "work/results/src_repo/repodata/2fadeaa73aa6313afb359828628bc661c4fc686d82a0e2acba6d93bdd3bd32b8-other.xml.gz",
+ "work/results/src_repo/repodata/43cc7ddec49d87af8e8b78c6ec2c3c8c9bf57d8a0723e3950266cd0440147af4-primary.xml.gz",
+ "work/results/src_repo/repodata/6cc8efe401cb22a8e07934d93ef6214fef91175130b2a8c1286161a7bf504a5a-primary.sqlite.bz2",
+ "work/results/src_repo/repodata/a19a91350de47d15d147c12aebe1aa4682e4733edc14719de09eaee8793c1080-filelists.xml.gz",
+ "work/results/src_repo/repodata/repomd.xml",
+ ],
+ ],
+ ]
for tst in test_lst:
assert deploy_sys._remove_duplicates_and_sort(tst[0]) == tst[1]
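# Illustrative sketch, not the lftools implementation: every input/expected
# pair above is consistent with "deduplicate, then sort lexicographically",
# which in plain Python is simply:
def remove_duplicates_and_sort_sketch(lst):
    """Hypothetical one-liner matching the behaviour exercised above."""
    return sorted(set(lst))


assert remove_duplicates_and_sort_sketch(["file2", "file3", "file2"]) == ["file2", "file3"]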
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'deploy'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
def test_deploy_logs(cli_runner, datafiles, responses):
"""Test deploy_logs() command for expected upload cases."""
os.chdir(str(datafiles))
- workspace_dir = os.path.join(str(datafiles), 'workspace')
+ workspace_dir = os.path.join(str(datafiles), "workspace")
# Test successful upload
- build_url = 'https://jenkins.example.org/job/builder-check-poms/204'
- nexus_url = 'https://nexus.example.org/service/local/repositories/logs/content-compressed'
- responses.add(responses.GET, '{}/consoleText'.format(build_url),
- status=201)
- responses.add(responses.GET, '{}/timestamps?time=HH:mm:ss&appendLog'.format(build_url),
- body='This is a console timestamped log.', status=201)
- responses.add(responses.POST, '{}/test/log/upload'.format(nexus_url), status=201)
+ build_url = "https://jenkins.example.org/job/builder-check-poms/204"
+ nexus_url = "https://nexus.example.org/service/local/repositories/logs/content-compressed"
+ responses.add(responses.GET, "{}/consoleText".format(build_url), status=201)
+ responses.add(
+ responses.GET,
+ "{}/timestamps?time=HH:mm:ss&appendLog".format(build_url),
+ body="This is a console timestamped log.",
+ status=201,
+ )
+ responses.add(responses.POST, "{}/test/log/upload".format(nexus_url), status=201)
result = cli_runner.invoke(
- cli.cli,
- ['--debug', 'deploy', 'logs', 'https://nexus.example.org', 'test/log/upload', build_url],
- obj={})
+ cli.cli, ["--debug", "deploy", "logs", "https://nexus.example.org", "test/log/upload", build_url], obj={}
+ )
assert result.exit_code == 0
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'deploy'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
def test_deploy_nexus_zip(cli_runner, datafiles, responses):
os.chdir(str(datafiles))
- nexus_url = 'https://nexus.example.org'
- nexus_repo = 'test-repo'
- nexus_path = 'test/path'
+ nexus_url = "https://nexus.example.org"
+ nexus_repo = "test-repo"
+ nexus_path = "test/path"
# Test success
- success_upload_url = '{}/service/local/repositories/{}/content-compressed/{}'.format(
- nexus_url,
- nexus_repo,
- nexus_path,
+ success_upload_url = "{}/service/local/repositories/{}/content-compressed/{}".format(
+ nexus_url, nexus_repo, nexus_path,
)
- responses.add(responses.POST, success_upload_url,
- status=201)
+ responses.add(responses.POST, success_upload_url, status=201)
result = cli_runner.invoke(
cli.cli,
- ['--debug', 'deploy', 'nexus-zip', 'https://nexus.example.org', 'test-repo', 'test/path', 'zip-test-files/test.zip'],
- obj={})
+ [
+ "--debug",
+ "deploy",
+ "nexus-zip",
+ "https://nexus.example.org",
+ "test-repo",
+ "test/path",
+ "zip-test-files/test.zip",
+ ],
+ obj={},
+ )
assert result.exit_code == 0
# Test repository 404
</body>
</html>
"""
- upload_404_url = '{}/service/local/repositories/{}/content-compressed/{}'.format(
- nexus_url,
- 'logs2',
- nexus_path,
- )
- responses.add(responses.POST, upload_404_url,
- body=upload_404, status=404)
+ upload_404_url = "{}/service/local/repositories/{}/content-compressed/{}".format(nexus_url, "logs2", nexus_path,)
+ responses.add(responses.POST, upload_404_url, body=upload_404, status=404)
result = cli_runner.invoke(
cli.cli,
- ['--debug', 'deploy', 'nexus-zip', 'https://nexus.example.org', 'logs2', 'test/path', 'zip-test-files/test.zip'],
- obj={})
+ [
+ "--debug",
+ "deploy",
+ "nexus-zip",
+ "https://nexus.example.org",
+ "logs2",
+ "test/path",
+ "zip-test-files/test.zip",
+ ],
+ obj={},
+ )
assert result.exit_code == 1
</slide>
</slideshow>
"""
- assert deploy_sys._get_node_from_xml(document, 'stagedRepositoryId') == '432'
+ assert deploy_sys._get_node_from_xml(document, "stagedRepositoryId") == "432"
with pytest.raises(SystemExit) as excinfo:
- deploy_sys._get_node_from_xml(document, 'NotFoundTag')
+ deploy_sys._get_node_from_xml(document, "NotFoundTag")
assert excinfo.type == SystemExit
"""Mock _log_error_and_exit function.
This function is modified to simply raise an Exception.
The original will print msg1 & msg2, then call sys.exit(1)."""
- msg1=msg_list[0]
- if 'Could not connect to URL:' in msg1:
- raise ValueError('connection_error')
- if 'Invalid URL:' in msg1:
- raise ValueError('invalid_url')
- if 'Not valid URL:' in msg1:
- raise ValueError('missing_schema')
+ msg1 = msg_list[0]
+ if "Could not connect to URL:" in msg1:
+ raise ValueError("connection_error")
+ if "Invalid URL:" in msg1:
+ raise ValueError("invalid_url")
+ if "Not valid URL:" in msg1:
+ raise ValueError("missing_schema")
if "profile with id 'INVALID' does not exist" in msg1:
- raise ValueError('profile.id.not.exist')
+ raise ValueError("profile.id.not.exist")
if "OTHER create error" in msg1:
- raise ValueError('other.create.error')
+ raise ValueError("other.create.error")
if "HTTP method POST is not supported by this URL" in msg1:
- raise ValueError('post.not.supported')
+ raise ValueError("post.not.supported")
if "Did not find nexus site" in msg1:
- raise ValueError('site.not.found')
+ raise ValueError("site.not.found")
if "Failed with status code " in msg1:
- raise ValueError('other.error.occured')
+ raise ValueError("other.error.occured")
if "Staging repository do not exist." in msg1:
- raise ValueError('missing.staging.repository')
+ raise ValueError("missing.staging.repository")
if "Staging repository is already closed." in msg1:
- raise ValueError('staging.already.closed')
- raise ValueError('fail')
+ raise ValueError("staging.already.closed")
+ raise ValueError("fail")
def test__request_post(responses, mocker):
"""Test _request_post."""
- mocker.patch('lftools.deploy._log_error_and_exit', side_effect=mocked_log_error)
- xml_doc="""
+ mocker.patch("lftools.deploy._log_error_and_exit", side_effect=mocked_log_error)
+ xml_doc = """
<promoteRequest><data>
<stagedRepositoryId>test1-1027</stagedRepositoryId>
<description>Close staging repository.</description>
</data></promoteRequest>
"""
- headers = {'Content-Type': 'application/xml'}
+ headers = {"Content-Type": "application/xml"}
- test_url='http://connection.error.test'
+ test_url = "http://connection.error.test"
exception = requests.exceptions.ConnectionError(test_url)
responses.add(responses.POST, test_url, body=exception)
with pytest.raises(ValueError) as excinfo:
deploy_sys._request_post(test_url, xml_doc, headers)
- assert 'connection_error' in str(excinfo.value)
+ assert "connection_error" in str(excinfo.value)
- test_url='http://invalid.url.test:8081'
+ test_url = "http://invalid.url.test:8081"
exception = requests.exceptions.InvalidURL(test_url)
responses.add(responses.POST, test_url, body=exception)
with pytest.raises(ValueError) as excinfo:
deploy_sys._request_post(test_url, xml_doc, headers)
- assert 'invalid_url' in str(excinfo.value)
+ assert "invalid_url" in str(excinfo.value)
- test_url='http://missing.schema.test:8081'
+ test_url = "http://missing.schema.test:8081"
exception = requests.exceptions.MissingSchema(test_url)
responses.add(responses.POST, test_url, body=exception)
with pytest.raises(ValueError) as excinfo:
deploy_sys._request_post(test_url, xml_doc, headers)
- assert 'missing_schema' in str(excinfo.value)
+ assert "missing_schema" in str(excinfo.value)
+
def test__request_post_file(responses, mocker):
"""Test _request_post_file."""
- zip_file='zip-test-files/test.zip'
+ zip_file = "zip-test-files/test.zip"
resp = {}
- test_url='http://connection.error.test'
+ test_url = "http://connection.error.test"
exception = requests.exceptions.ConnectionError(test_url)
responses.add(responses.POST, test_url, body=exception)
with pytest.raises(requests.HTTPError) as excinfo:
resp = deploy_sys._request_post_file(test_url, zip_file)
- assert 'Could not connect to URL' in str(excinfo.value)
+ assert "Could not connect to URL" in str(excinfo.value)
- test_url='http://invalid.url.test:8081'
+ test_url = "http://invalid.url.test:8081"
exception = requests.exceptions.InvalidURL(test_url)
responses.add(responses.POST, test_url, body=exception)
with pytest.raises(requests.HTTPError) as excinfo:
resp = deploy_sys._request_post_file(test_url, zip_file)
- assert 'Invalid URL' in str(excinfo.value)
+ assert "Invalid URL" in str(excinfo.value)
- test_url='http://missing.schema.test:8081'
+ test_url = "http://missing.schema.test:8081"
exception = requests.exceptions.MissingSchema(test_url)
responses.add(responses.POST, test_url, body=exception)
with pytest.raises(requests.HTTPError) as excinfo:
resp = deploy_sys._request_post_file(test_url, zip_file)
- assert 'Not valid URL' in str(excinfo.value)
+ assert "Not valid URL" in str(excinfo.value)
- test_url='http://repository.read.only:8081'
+ test_url = "http://repository.read.only:8081"
responses.add(responses.POST, test_url, body=None, status=400)
with pytest.raises(requests.HTTPError) as excinfo:
resp = deploy_sys._request_post_file(test_url, zip_file)
- assert 'Repository is read only' in str(excinfo.value)
+ assert "Repository is read only" in str(excinfo.value)
- test_url='http://repository.not.found:8081'
+ test_url = "http://repository.not.found:8081"
responses.add(responses.POST, test_url, body=None, status=404)
with pytest.raises(requests.HTTPError) as excinfo:
resp = deploy_sys._request_post_file(test_url, zip_file)
- assert 'Did not find repository' in str(excinfo.value)
+ assert "Did not find repository" in str(excinfo.value)
- test_url='http://other.upload.error:8081'
+ test_url = "http://other.upload.error:8081"
responses.add(responses.POST, test_url, body=None, status=500)
with pytest.raises(requests.HTTPError) as excinfo:
resp = deploy_sys._request_post_file(test_url, zip_file)
- assert 'Failed to upload to Nexus with status code' in str(excinfo.value)
+ assert "Failed to upload to Nexus with status code" in str(excinfo.value)
def test__request_post_file_data(responses, mocker):
    """Test _request_post_file with extra form data."""
- param={'r':(None, 'testing')}
- zip_file='zip-test-files/test.zip'
+ param = {"r": (None, "testing")}
+ zip_file = "zip-test-files/test.zip"
resp = {}
- test_url='http://connection.error.test'
+ test_url = "http://connection.error.test"
exception = requests.exceptions.ConnectionError(test_url)
responses.add(responses.POST, test_url, body=exception)
with pytest.raises(requests.HTTPError) as excinfo:
resp = deploy_sys._request_post_file(test_url, zip_file, param)
- assert 'Could not connect to URL' in str(excinfo.value)
+ assert "Could not connect to URL" in str(excinfo.value)
- test_url='http://invalid.url.test:8081'
+ test_url = "http://invalid.url.test:8081"
exception = requests.exceptions.InvalidURL(test_url)
responses.add(responses.POST, test_url, body=exception)
with pytest.raises(requests.HTTPError) as excinfo:
resp = deploy_sys._request_post_file(test_url, zip_file, param)
- assert 'Invalid URL' in str(excinfo.value)
+ assert "Invalid URL" in str(excinfo.value)
- test_url='http://missing.schema.test:8081'
+ test_url = "http://missing.schema.test:8081"
exception = requests.exceptions.MissingSchema(test_url)
responses.add(responses.POST, test_url, body=exception)
with pytest.raises(requests.HTTPError) as excinfo:
resp = deploy_sys._request_post_file(test_url, zip_file, param)
- assert 'Not valid URL' in str(excinfo.value)
+ assert "Not valid URL" in str(excinfo.value)
- test_url='http://repository.read.only:8081'
+ test_url = "http://repository.read.only:8081"
responses.add(responses.POST, test_url, body=None, status=400)
with pytest.raises(requests.HTTPError) as excinfo:
resp = deploy_sys._request_post_file(test_url, zip_file, param)
- assert 'Repository is read only' in str(excinfo.value)
+ assert "Repository is read only" in str(excinfo.value)
- test_url='http://repository.not.found:8081'
+ test_url = "http://repository.not.found:8081"
responses.add(responses.POST, test_url, body=None, status=404)
with pytest.raises(requests.HTTPError) as excinfo:
resp = deploy_sys._request_post_file(test_url, zip_file, param)
- assert 'Did not find repository' in str(excinfo.value)
+ assert "Did not find repository" in str(excinfo.value)
- test_url='http://other.upload.error:8081'
+ test_url = "http://other.upload.error:8081"
responses.add(responses.POST, test_url, body=None, status=500)
with pytest.raises(requests.HTTPError) as excinfo:
resp = deploy_sys._request_post_file(test_url, zip_file, param)
- assert 'Failed to upload to Nexus with status code' in str(excinfo.value)
+ assert "Failed to upload to Nexus with status code" in str(excinfo.value)
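# Illustrative sketch, assuming only what the two tests above exercise:
# _request_post_file appears to map upload HTTP statuses onto
# requests.HTTPError messages roughly as follows (all names here are
# hypothetical, not the lftools implementation).
import requests


class _RespStub:
    """Minimal stand-in for requests.Response, used only in this sketch."""

    def __init__(self, status_code):
        self.status_code = status_code


def raise_for_upload_status_sketch(resp):
    """Hypothetical status handling mirroring the assertions above."""
    if resp.status_code == 400:
        raise requests.HTTPError("Repository is read only")
    if resp.status_code == 404:
        raise requests.HTTPError("Did not find repository")
    if resp.status_code != 201:
        raise requests.HTTPError("Failed to upload to Nexus with status code: {}".format(resp.status_code))


try:
    raise_for_upload_status_sketch(_RespStub(404))
except requests.HTTPError as err:
    assert "Did not find repository" in str(err)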
def test_nexus_stage_repo_close(responses, mocker):
"""Test nexus_stage_repo_close."""
- mocker.patch('lftools.deploy._log_error_and_exit', side_effect=mocked_log_error)
- url='service/local/staging/profiles'
+ mocker.patch("lftools.deploy._log_error_and_exit", side_effect=mocked_log_error)
+ url = "service/local/staging/profiles"
- responses.add(responses.POST, 'http://valid.create.post/{}/{}/finish'.format(url, '93fb68073c18' ),
- body=None, status=201)
- deploy_sys.nexus_stage_repo_close('valid.create.post', '93fb68073c18', 'test1-1027')
+ responses.add(
+ responses.POST, "http://valid.create.post/{}/{}/finish".format(url, "93fb68073c18"), body=None, status=201
+ )
+ deploy_sys.nexus_stage_repo_close("valid.create.post", "93fb68073c18", "test1-1027")
xml_site_not_found = """
<html><head><title>404 - Site Not Found</title></head>
<body><h1>404 - Site not found</h1></body>
</html>
"""
- responses.add(responses.POST, 'http://site.not.found/{}/{}/finish'.format(url, 'INVALID'),
- body=xml_site_not_found, status=404)
+ responses.add(
+ responses.POST, "http://site.not.found/{}/{}/finish".format(url, "INVALID"), body=xml_site_not_found, status=404
+ )
with pytest.raises(ValueError) as excinfo:
- deploy_sys.nexus_stage_repo_close('site.not.found', 'INVALID', 'test1-1027')
- assert 'site.not.found' in str(excinfo.value)
+ deploy_sys.nexus_stage_repo_close("site.not.found", "INVALID", "test1-1027")
+ assert "site.not.found" in str(excinfo.value)
xml_missing_staging_repository = """
<nexus-error><errors><error>
<msg>Unhandled: Missing staging repository: test1-1</msg>
</error></errors></nexus-error>
"""
- responses.add(responses.POST, 'http://missing.staging.repository/{}/{}/finish'.format(url, 'INVALID'),
- body=xml_missing_staging_repository, status=500)
+ responses.add(
+ responses.POST,
+ "http://missing.staging.repository/{}/{}/finish".format(url, "INVALID"),
+ body=xml_missing_staging_repository,
+ status=500,
+ )
with pytest.raises(ValueError) as excinfo:
- deploy_sys.nexus_stage_repo_close('missing.staging.repository', 'INVALID', 'test1-1027')
- assert 'missing.staging.repository' in str(excinfo.value)
+ deploy_sys.nexus_stage_repo_close("missing.staging.repository", "INVALID", "test1-1027")
+ assert "missing.staging.repository" in str(excinfo.value)
xml_staging_already_closed = """
<nexus-error><errors><error>
<msg>Unhandled: Repository: test1-1000 has invalid state: closed</msg>
</error></errors></nexus-error>
"""
- responses.add(responses.POST, 'http://staging.already.closed/{}/{}/finish'.format(url, 'INVALID'),
- body=xml_staging_already_closed, status=500)
+ responses.add(
+ responses.POST,
+ "http://staging.already.closed/{}/{}/finish".format(url, "INVALID"),
+ body=xml_staging_already_closed,
+ status=500,
+ )
with pytest.raises(ValueError) as excinfo:
- deploy_sys.nexus_stage_repo_close('staging.already.closed', 'INVALID', 'test1-1027')
- assert 'staging.already.closed' in str(excinfo.value)
+ deploy_sys.nexus_stage_repo_close("staging.already.closed", "INVALID", "test1-1027")
+ assert "staging.already.closed" in str(excinfo.value)
xml_other_error_occured = """
<html><head><title>303 - See Other</title></head>
<body><h1>303 - See Other</h1></body>
</html>
"""
- responses.add(responses.POST, 'http://other.error.occured/{}/{}/finish'.format(url, 'INVALID'),
- body=xml_other_error_occured, status=303)
+ responses.add(
+ responses.POST,
+ "http://other.error.occured/{}/{}/finish".format(url, "INVALID"),
+ body=xml_other_error_occured,
+ status=303,
+ )
with pytest.raises(ValueError) as excinfo:
- deploy_sys.nexus_stage_repo_close('other.error.occured', 'INVALID', 'test1-1027')
- assert 'other.error.occured' in str(excinfo.value)
+ deploy_sys.nexus_stage_repo_close("other.error.occured", "INVALID", "test1-1027")
+ assert "other.error.occured" in str(excinfo.value)
def test_nexus_stage_repo_create(responses, mocker):
"""Test nexus_stage_repo_create."""
- mocker.patch('lftools.deploy._log_error_and_exit', side_effect=mocked_log_error)
- url = 'service/local/staging/profiles'
+ mocker.patch("lftools.deploy._log_error_and_exit", side_effect=mocked_log_error)
+ url = "service/local/staging/profiles"
xml_created = "<stagedRepositoryId>test1-1030</stagedRepositoryId>"
- responses.add(responses.POST, 'http://valid.create.post/{}/{}/start'.format(url, '93fb68073c18' ),
- body=xml_created, status=201)
- res = deploy_sys.nexus_stage_repo_create('valid.create.post', '93fb68073c18')
- assert res == 'test1-1030'
+ responses.add(
+ responses.POST, "http://valid.create.post/{}/{}/start".format(url, "93fb68073c18"), body=xml_created, status=201
+ )
+ res = deploy_sys.nexus_stage_repo_create("valid.create.post", "93fb68073c18")
+ assert res == "test1-1030"
xml_profile_id_dont_exist = """
<nexus-error><errors><error>
<msg>Cannot create Staging Repository, profile with id 'INVALID' does not exist.</msg>
</error></errors></nexus-error>
"""
- responses.add(responses.POST, 'http://profile.id_not.exist/{}/{}/start'.format(url, 'INVALID' ),
- body=xml_profile_id_dont_exist, status=404)
+ responses.add(
+ responses.POST,
+ "http://profile.id_not.exist/{}/{}/start".format(url, "INVALID"),
+ body=xml_profile_id_dont_exist,
+ status=404,
+ )
with pytest.raises(ValueError) as excinfo:
- res = deploy_sys.nexus_stage_repo_create('profile.id_not.exist', 'INVALID')
- assert 'profile.id.not.exist' in str(excinfo.value)
+ res = deploy_sys.nexus_stage_repo_create("profile.id_not.exist", "INVALID")
+ assert "profile.id.not.exist" in str(excinfo.value)
- xml_other_create_error = "<nexus-error><errors><error><id>*</id><msg>OTHER create error.</msg></error></errors></nexus-error>"
- responses.add(responses.POST, 'http://other.create.error/{}/{}/start'.format(url, 'INVALID' ),
- body=xml_other_create_error, status=404)
+ xml_other_create_error = (
+ "<nexus-error><errors><error><id>*</id><msg>OTHER create error.</msg></error></errors></nexus-error>"
+ )
+ responses.add(
+ responses.POST,
+ "http://other.create.error/{}/{}/start".format(url, "INVALID"),
+ body=xml_other_create_error,
+ status=404,
+ )
with pytest.raises(ValueError) as excinfo:
- res = deploy_sys.nexus_stage_repo_create('other.create.error', 'INVALID')
- assert 'other.create.error' in str(excinfo.value)
+ res = deploy_sys.nexus_stage_repo_create("other.create.error", "INVALID")
+ assert "other.create.error" in str(excinfo.value)
xml_other_error_occured = """
<html>
<body><h1>303 - See Other</h1></body>
</html>
"""
- responses.add(responses.POST, 'http://other.error.occured/{}/{}/start'.format(url, 'INVALID' ),
- body=xml_other_error_occured, status=303)
+ responses.add(
+ responses.POST,
+ "http://other.error.occured/{}/{}/start".format(url, "INVALID"),
+ body=xml_other_error_occured,
+ status=303,
+ )
with pytest.raises(ValueError) as excinfo:
- res = deploy_sys.nexus_stage_repo_create('other.error.occured', 'INVALID')
- assert 'other.error.occured' in str(excinfo.value)
+ res = deploy_sys.nexus_stage_repo_create("other.error.occured", "INVALID")
+ assert "other.error.occured" in str(excinfo.value)
xml_post_not_supported = """
<html>
</body>
</html>
"""
- responses.add(responses.POST, 'http://post.not.supported/{}/{}/start'.format(url, 'INVALID' ),
- body=xml_post_not_supported, status=405)
+ responses.add(
+ responses.POST,
+ "http://post.not.supported/{}/{}/start".format(url, "INVALID"),
+ body=xml_post_not_supported,
+ status=405,
+ )
with pytest.raises(ValueError) as excinfo:
- res = deploy_sys.nexus_stage_repo_create('post.not.supported', 'INVALID')
- assert 'post.not.supported' in str(excinfo.value)
+ res = deploy_sys.nexus_stage_repo_create("post.not.supported", "INVALID")
+ assert "post.not.supported" in str(excinfo.value)
xml_site_not_found = """
<html><head><title>404 - Site Not Found</title></head>
<body><h1>404 - Site not found</h1></body>
</html>
"""
- responses.add(responses.POST, 'http://site.not.found/{}/{}/start'.format(url, 'INVALID' ),
- body=xml_site_not_found, status=404)
+ responses.add(
+ responses.POST, "http://site.not.found/{}/{}/start".format(url, "INVALID"), body=xml_site_not_found, status=404
+ )
with pytest.raises(ValueError) as excinfo:
- res = deploy_sys.nexus_stage_repo_create('site.not.found', 'INVALID')
- assert 'site.not.found' in str(excinfo.value)
+ res = deploy_sys.nexus_stage_repo_create("site.not.found", "INVALID")
+ assert "site.not.found" in str(excinfo.value)
def test__upload_maven_file_to_nexus(responses, mocker):
"""Test upload_to_nexus."""
- zip_file='zip-test-files/test.tar.xz'
- common_urlpart='service/local/artifact/maven/content'
+ zip_file = "zip-test-files/test.tar.xz"
+ common_urlpart = "service/local/artifact/maven/content"
- nexus_repo_id='testing'
- group_id='com5.test'
- artifact_id='ArtId2'
- version='1.2.7'
- packaging='tar.xz'
- classified=None
+ nexus_repo_id = "testing"
+ group_id = "com5.test"
+ artifact_id = "ArtId2"
+ version = "1.2.7"
+ packaging = "tar.xz"
+ classified = None
resp = {}
- test_url='http://all.ok.upload:8081'
- responses.add(responses.POST, '{}/{}'.format(test_url, common_urlpart), body=None, status=201)
- resp = deploy_sys.upload_maven_file_to_nexus(test_url, nexus_repo_id, group_id, artifact_id, version, packaging, zip_file)
+ test_url = "http://all.ok.upload:8081"
+ responses.add(responses.POST, "{}/{}".format(test_url, common_urlpart), body=None, status=201)
+ resp = deploy_sys.upload_maven_file_to_nexus(
+ test_url, nexus_repo_id, group_id, artifact_id, version, packaging, zip_file
+ )
xml_other_error = """
<nexus-error><errors><error>
<msg>Something went wrong.</msg>
</error></errors></nexus-error>
"""
- test_url='http://something.went.wrong:8081'
- responses.add(responses.POST, '{}/{}'.format(test_url, common_urlpart), body=xml_other_error, status=405)
+ test_url = "http://something.went.wrong:8081"
+ responses.add(responses.POST, "{}/{}".format(test_url, common_urlpart), body=xml_other_error, status=405)
with pytest.raises(requests.HTTPError) as excinfo:
- resp = deploy_sys.upload_maven_file_to_nexus(test_url, nexus_repo_id, group_id, artifact_id, version, packaging, zip_file)
- assert 'Something went wrong' in str(excinfo.value)
+ resp = deploy_sys.upload_maven_file_to_nexus(
+ test_url, nexus_repo_id, group_id, artifact_id, version, packaging, zip_file
+ )
+ assert "Something went wrong" in str(excinfo.value)
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'deploy'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
def test_deploy_nexus_snapshot(datafiles, responses):
"""Test deploy_nexus with snapshot.
There are two files that should not be uploaded.
"""
os.chdir(str(datafiles))
- nexus_url = 'http://successfull.nexus.deploy/nexus/content/repositories/releases'
- deploy_dir = 'm2repo'
+ nexus_url = "http://successfull.nexus.deploy/nexus/content/repositories/releases"
+ deploy_dir = "m2repo"
# Test success - Snapshot
snapshot = True
- test_files = ['4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom',
- '4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom.sha1',
- '4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom.md5',
- '4.0.3-SNAPSHOT/maven-metadata.xml',
- '4.0.3-SNAPSHOT/maven-metadata.xml.md5',
- '4.0.3-SNAPSHOT/maven-metadata.xml.sha1',
- 'maven-metadata.xml',
- 'maven-metadata.xml.md5',
- 'maven-metadata.xml.sha1']
+ test_files = [
+ "4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom",
+ "4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom.sha1",
+ "4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom.md5",
+ "4.0.3-SNAPSHOT/maven-metadata.xml",
+ "4.0.3-SNAPSHOT/maven-metadata.xml.md5",
+ "4.0.3-SNAPSHOT/maven-metadata.xml.sha1",
+ "maven-metadata.xml",
+ "maven-metadata.xml.md5",
+ "maven-metadata.xml.sha1",
+ ]
for file in test_files:
- success_upload_url = '{}/{}'.format(nexus_url, file)
- responses.add(responses.PUT, success_upload_url,
- status=201)
+ success_upload_url = "{}/{}".format(nexus_url, file)
+ responses.add(responses.PUT, success_upload_url, status=201)
deploy_sys.deploy_nexus(nexus_url, deploy_dir, snapshot)
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'deploy'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
def test_deploy_nexus_nosnapshot(datafiles, responses):
"""Test deploy_nexus with no snapshot.
There are six files that should not be uploaded, and three that should.
"""
os.chdir(str(datafiles))
- nexus_url = 'http://successfull.nexus.deploy/nexus/content/repositories/releases'
- deploy_dir = 'm2repo'
+ nexus_url = "http://successfull.nexus.deploy/nexus/content/repositories/releases"
+ deploy_dir = "m2repo"
# Test success - No Snapshot
- test_files = ['4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom',
- '4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom.sha1',
- '4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom.md5']
+ test_files = [
+ "4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom",
+ "4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom.sha1",
+ "4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom.md5",
+ ]
for file in test_files:
- success_upload_url = '{}/{}'.format(nexus_url, file)
- responses.add(responses.PUT, success_upload_url,
- status=201)
+ success_upload_url = "{}/{}".format(nexus_url, file)
+ responses.add(responses.PUT, success_upload_url, status=201)
deploy_sys.deploy_nexus(nexus_url, deploy_dir)
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'deploy'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "deploy"),)
def test_deploy_nexus_stage(datafiles, responses):
"""Test deploy_nexus_stage."""
- url='http://valid.deploy.stage'
- url_repo = 'service/local/staging/profiles'
- staging_profile_id='93fb68073c18'
- repo_id='test1-1030'
+ url = "http://valid.deploy.stage"
+ url_repo = "service/local/staging/profiles"
+ staging_profile_id = "93fb68073c18"
+ repo_id = "test1-1030"
- #Setup for nexus_stage_repo_create
+ # Setup for nexus_stage_repo_create
xml_created = "<stagedRepositoryId>{}</stagedRepositoryId>".format(repo_id)
- responses.add(responses.POST, '{}/{}/{}/start'.format(url, url_repo, staging_profile_id),
- body=xml_created, status=201)
+ responses.add(
+ responses.POST, "{}/{}/{}/start".format(url, url_repo, staging_profile_id), body=xml_created, status=201
+ )
- #Setup for deploy_nexus with no snapshot
+ # Setup for deploy_nexus with no snapshot
os.chdir(str(datafiles))
- nexus_deploy_url = '{}/service/local/staging/deployByRepositoryId/{}'.format(url, repo_id)
- deploy_dir = 'm2repo'
- test_files = ['4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom',
- '4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom.sha1',
- '4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom.md5']
+ nexus_deploy_url = "{}/service/local/staging/deployByRepositoryId/{}".format(url, repo_id)
+ deploy_dir = "m2repo"
+ test_files = [
+ "4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom",
+ "4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom.sha1",
+ "4.0.3-SNAPSHOT/odlparent-lite-4.0.3-20181120.113136-1.pom.md5",
+ ]
for file in test_files:
- success_upload_url = '{}/{}'.format(nexus_deploy_url, file)
- responses.add(responses.PUT, success_upload_url,
- status=201)
+ success_upload_url = "{}/{}".format(nexus_deploy_url, file)
+ responses.add(responses.PUT, success_upload_url, status=201)
- #Setup for nexus_stage_repo_close
- responses.add(responses.POST, '{}/{}/{}/finish'.format(url, url_repo, staging_profile_id),
- body=None, status=201)
+ # Setup for nexus_stage_repo_close
+ responses.add(responses.POST, "{}/{}/{}/finish".format(url, url_repo, staging_profile_id), body=None, status=201)
- #Execute test, should not return anything for successful run.
- deploy_sys.deploy_nexus_stage (url, staging_profile_id, deploy_dir)
+ # Execute test, should not return anything for successful run.
+ deploy_sys.deploy_nexus_stage(url, staging_profile_id, deploy_dir)
from lftools import cli
-FIXTURE_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'fixtures',
- )
+FIXTURE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures",)
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'license'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "license"),)
def test_check_license(cli_runner, datafiles):
"""Test check_license() command."""
os.chdir(str(datafiles))
# Check that license checker passes when file has license.
- result = cli_runner.invoke(cli.cli, ['license', 'check', 'license.py'], obj={})
+ result = cli_runner.invoke(cli.cli, ["license", "check", "license.py"], obj={})
# noqa: B101 .
assert result.exit_code == 0
# Check that license checker fails when file is missing license.
- result = cli_runner.invoke(cli.cli, ['license', 'check', 'no_license1.py'], obj={})
+ result = cli_runner.invoke(cli.cli, ["license", "check", "no_license1.py"], obj={})
# noqa: B101 .
assert result.exit_code == 1
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'license'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "license"),)
def test_check_license_directory(cli_runner, datafiles):
"""Test check_license_directory() command."""
os.chdir(str(datafiles))
# Check that check-dir fails due to directory containing files
# with no license.
- result = cli_runner.invoke(cli.cli, ['license', 'check-dir', '.'], obj={})
+ result = cli_runner.invoke(cli.cli, ["license", "check-dir", "."], obj={})
# noqa: B101 .
assert result.exit_code == 1
# Check that check-dir passes when directory contains files with licenses
- os.remove('no_license1.py')
- os.remove('no_license2.py')
- result = cli_runner.invoke(cli.cli, ['license', 'check-dir', '.'], obj={})
+ os.remove("no_license1.py")
+ os.remove("no_license2.py")
+ result = cli_runner.invoke(cli.cli, ["license", "check-dir", "."], obj={})
# noqa: B101 .
assert result.exit_code == 0
from lftools.nexus import util
-FIXTURE_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)), 'fixtures')
+FIXTURE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures")
@pytest.fixture
responses.add(responses.GET, baseurl_endpoint, status=200)
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, 'nexus'))
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "nexus"))
def test_create_roles(datafiles, responses, nexus2_obj_create):
"""Test create_roles() method with good config."""
os.chdir(str(datafiles))
def test_create_repo_target_regex():
"""Test create_repo_target_regex() command."""
- odlparent = util.create_repo_target_regex('org.opendaylight.odlparent')
+ odlparent = util.create_repo_target_regex("org.opendaylight.odlparent")
odlparent_regex = re.compile(odlparent)
assert odlparent_regex.match(
- '/org/opendaylight/odlparent/odlparent'
- '/4.0.0-SNAPSHOT/odlparent-4.0.0-20180424.132124-69.pom'
+ "/org/opendaylight/odlparent/odlparent" "/4.0.0-SNAPSHOT/odlparent-4.0.0-20180424.132124-69.pom"
)
- honeycomb = util.create_repo_target_regex('org.opendaylight.honeycomb.vbd')
+ honeycomb = util.create_repo_target_regex("org.opendaylight.honeycomb.vbd")
honeycomb_regex = re.compile(honeycomb)
assert honeycomb_regex.match(
- '/org/opendaylight/honeycomb/vbd/odl-vbd'
- '/1.4.0-SNAPSHOT/odl-vbd-1.4.0-20180422.024456-12-features.xml'
+ "/org/opendaylight/honeycomb/vbd/odl-vbd" "/1.4.0-SNAPSHOT/odl-vbd-1.4.0-20180422.024456-12-features.xml"
)
- mso = util.create_repo_target_regex('org.openecomp.mso')
+ mso = util.create_repo_target_regex("org.openecomp.mso")
mso_regex = re.compile(mso)
- assert mso_regex.match(
- '/org/openecomp/mso/'
- '1.1.0-SNAPSHOT/mso-1.1.0-20170606.171056-26.pom'
- )
+ assert mso_regex.match("/org/openecomp/mso/" "1.1.0-SNAPSHOT/mso-1.1.0-20170606.171056-26.pom")
- dcaegen2 = util.create_repo_target_regex('org.onap.dcaegen2')
+ dcaegen2 = util.create_repo_target_regex("org.onap.dcaegen2")
dcaegen2_regex = re.compile(dcaegen2)
- assert dcaegen2_regex.match(
- '/org/onap/dcaegen2/'
- '1.2.0-SNAPSHOT/dcaegen2-1.2.0-20180403.182529-10.pom'
- )
+ assert dcaegen2_regex.match("/org/onap/dcaegen2/" "1.2.0-SNAPSHOT/dcaegen2-1.2.0-20180403.182529-10.pom")
- vpp = util.create_repo_target_regex('io.fd.vpp')
+ vpp = util.create_repo_target_regex("io.fd.vpp")
vpp_regex = re.compile(vpp)
- assert vpp_regex.match('/io/fd/vpp/jvpp/16.06/jvpp-16.06.jar')
+ assert vpp_regex.match("/io/fd/vpp/jvpp/16.06/jvpp-16.06.jar")
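# Illustrative sketch (the real util.create_repo_target_regex may differ,
# e.g. in how it escapes dots): every assertion above is satisfied by
# turning the Maven group id into a slash-separated path prefix and
# allowing anything beneath it.
import re


def create_repo_target_regex_sketch(group_id):
    """Hypothetical mapping of a group id to a repo-target pattern."""
    return "^/" + group_id.replace(".", "/") + "/.*"


assert re.compile(create_repo_target_regex_sketch("io.fd.vpp")).match("/io/fd/vpp/jvpp/16.06/jvpp-16.06.jar")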
from lftools import cli
import lftools.nexus.release_docker_hub as rdh
+
def test_remove_http_from_url():
"""Test _remove_http_from_url."""
- test_url=[["192.168.1.1", "192.168.1.1"],
- ["192.168.1.1:8081", "192.168.1.1:8081"],
- ["http://192.168.1.1:8081/nexus", "192.168.1.1:8081/nexus"],
- ["192.168.1.1:8081/nexus/", "192.168.1.1:8081/nexus/"],
- ["http://192.168.1.1:8081/nexus", "192.168.1.1:8081/nexus"],
- ["https://192.168.1.1:8081/nexus", "192.168.1.1:8081/nexus"],
- ["https://192.168.1.1:8081/nexus/", "192.168.1.1:8081/nexus/"],
- ["http://www.goodnexussite.org:8081", "www.goodnexussite.org:8081"]]
+ test_url = [
+ ["192.168.1.1", "192.168.1.1"],
+ ["192.168.1.1:8081", "192.168.1.1:8081"],
+ ["http://192.168.1.1:8081/nexus", "192.168.1.1:8081/nexus"],
+ ["192.168.1.1:8081/nexus/", "192.168.1.1:8081/nexus/"],
+ ["http://192.168.1.1:8081/nexus", "192.168.1.1:8081/nexus"],
+ ["https://192.168.1.1:8081/nexus", "192.168.1.1:8081/nexus"],
+ ["https://192.168.1.1:8081/nexus/", "192.168.1.1:8081/nexus/"],
+ ["http://www.goodnexussite.org:8081", "www.goodnexussite.org:8081"],
+ ]
for url in test_url:
assert rdh._remove_http_from_url(url[0]) == url[1]
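# Illustrative sketch (not the lftools code): the pairs above amount to
# dropping a leading "http://" or "https://" scheme and leaving everything
# else, including trailing slashes, untouched.
def remove_http_from_url_sketch(url):
    """Hypothetical helper mirroring the pairs in test_url."""
    for scheme in ("https://", "http://"):
        if url.startswith(scheme):
            return url[len(scheme):]
    return url


assert remove_http_from_url_sketch("https://192.168.1.1:8081/nexus/") == "192.168.1.1:8081/nexus/"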
def test_format_image_id():
    """Test _format_image_id."""
- test_id=[["b9e15a5d1e1a", "b9e15a5d1e1a"],
- ["sha256:b9e15a5d1e1a", "b9e15a5d1e1a"],
- ["sha256:3450464d68", "3450464d68"],
- ["192.168.1.1:8081/nexus/", "192.168.1.1:8081/nexus/"],
- ["sha256:3450464d68c9443dedc8bfe3272a23e6441c37f707c42d32fee0ebdbcd319d2c", "3450464d68c9443dedc8bfe3272a23e6441c37f707c42d32fee0ebdbcd319d2c"]]
+ test_id = [
+ ["b9e15a5d1e1a", "b9e15a5d1e1a"],
+ ["sha256:b9e15a5d1e1a", "b9e15a5d1e1a"],
+ ["sha256:3450464d68", "3450464d68"],
+ ["192.168.1.1:8081/nexus/", "192.168.1.1:8081/nexus/"],
+ [
+ "sha256:3450464d68c9443dedc8bfe3272a23e6441c37f707c42d32fee0ebdbcd319d2c",
+ "3450464d68c9443dedc8bfe3272a23e6441c37f707c42d32fee0ebdbcd319d2c",
+ ],
+ ]
for id in test_id:
assert rdh._format_image_id(id[0]) == id[1]
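# Illustrative sketch: per the pairs above, _format_image_id only needs to
# strip a leading "sha256:" prefix; any other string passes through as-is.
def format_image_id_sketch(image_id):
    """Hypothetical prefix-stripper matching test_id."""
    prefix = "sha256:"
    return image_id[len(prefix):] if image_id.startswith(prefix) else image_id


assert format_image_id_sketch("sha256:3450464d68") == "3450464d68"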
def test_tag_class_valid_tags():
"""Test TagClass"""
- org = 'onap'
- repo = 'base/sdc-sanity'
- test_tags =["1.2.3", "1.22.333", "111.22.3", "10.11.12", "1.0.3"]
- rdh.initialize (org)
- tags = rdh.TagClass (org, repo)
+ org = "onap"
+ repo = "base/sdc-sanity"
+ test_tags = ["1.2.3", "1.22.333", "111.22.3", "10.11.12", "1.0.3"]
+ rdh.initialize(org)
+ tags = rdh.TagClass(org, repo)
for tag in test_tags:
tags.add_tag(tag)
assert len(tags.valid) == len(test_tags)
assert len(tags.invalid) == 0
+
def test_tag_class_invalid_tags():
"""Test TagClass"""
- org = 'onap'
- repo = 'base/sdc-sanity'
- test_tags =["v1.2.3", "1.22", "111.22.3a", "10.11.12.3", "draft",
- "1.2.jan14", "1.2.3.4.5.6.7.8", "1", "latest", "v0.1.0",
- "1.1-20170906T011834", "2.0-20180221T152423",
- "1.3.0-20181121T1329", "1.1.2-SNAPSHOT-20181231T234559Z",
- "1.1.2-STAGING-20181231T234559Z"]
- rdh.initialize (org)
- tags = rdh.TagClass (org, repo)
+ org = "onap"
+ repo = "base/sdc-sanity"
+ test_tags = [
+ "v1.2.3",
+ "1.22",
+ "111.22.3a",
+ "10.11.12.3",
+ "draft",
+ "1.2.jan14",
+ "1.2.3.4.5.6.7.8",
+ "1",
+ "latest",
+ "v0.1.0",
+ "1.1-20170906T011834",
+ "2.0-20180221T152423",
+ "1.3.0-20181121T1329",
+ "1.1.2-SNAPSHOT-20181231T234559Z",
+ "1.1.2-STAGING-20181231T234559Z",
+ ]
+ rdh.initialize(org)
+ tags = rdh.TagClass(org, repo)
for tag in test_tags:
tags.add_tag(tag)
assert len(tags.invalid) == len(test_tags)
assert len(tags.valid) == 0
+
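# Illustrative sketch, assuming the validity rule implied by the two tag
# tests above: a tag is "valid" exactly when it is three dot-separated
# numeric fields with nothing else attached (so "10.11.12" passes while
# "v1.2.3", "1.22" and "1.3.0-20181121T1329" do not).
import re

VALID_TAG_SKETCH = re.compile(r"^\d+\.\d+\.\d+$")

assert VALID_TAG_SKETCH.match("10.11.12")
assert not VALID_TAG_SKETCH.match("1.3.0-20181121T1329")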
def test_tag_class_repository_exist():
"""Test TagClass"""
- org = 'onap'
- repo = 'base/sdc-sanity'
- rdh.initialize (org)
- tags = rdh.TagClass (org, repo)
+ org = "onap"
+ repo = "base/sdc-sanity"
+ rdh.initialize(org)
+ tags = rdh.TagClass(org, repo)
assert tags.repository_exist == True
+
def test_nexus_tag_class(responses):
"""Test NexusTagClass"""
- org = 'onap'
- repo = 'base/sdc-sanity'
- url = 'https://nexus3.onap.org:10002/v2/onap/base/sdc-sanity/tags/list'
+ org = "onap"
+ repo = "base/sdc-sanity"
+ url = "https://nexus3.onap.org:10002/v2/onap/base/sdc-sanity/tags/list"
answer = '{"name":"onap/base_sdc-sanity","tags":["latest","1.3.0","1.3.1","1.4.0","1.4.1","v1.0.0"]}'
- answer_valid_tags = ["1.3.0","1.3.1","1.4.0","1.4.1"]
- answer_invalid_tags = ["latest", "v1.0.0" ]
+ answer_valid_tags = ["1.3.0", "1.3.1", "1.4.0", "1.4.1"]
+ answer_invalid_tags = ["latest", "v1.0.0"]
responses.add(responses.GET, url, body=answer, status=200)
- rdh.initialize (org)
- test_tags = rdh.NexusTagClass (org, repo)
+ rdh.initialize(org)
+ test_tags = rdh.NexusTagClass(org, repo)
for tag in answer_valid_tags:
assert tag in test_tags.valid
for tag in answer_invalid_tags:
def test_docker_tag_class(responses):
"""Test DockerTagClass"""
- org = 'onap'
- repo = 'base-sdc-sanity'
- url = 'https://registry.hub.docker.com/v1/repositories/onap/base-sdc-sanity/tags'
+ org = "onap"
+ repo = "base-sdc-sanity"
+ url = "https://registry.hub.docker.com/v1/repositories/onap/base-sdc-sanity/tags"
answer = """[{"layer": "", "name": "latest"},
{"layer": "", "name": "1.3.0"},
{"layer": "", "name": "1.3.1"},
{"layer": "", "name": "1.4.1"},
{"layer": "", "name": "v1.0.0"}]
"""
- answer_valid_tags = ["1.3.0","1.3.1","1.4.0","1.4.1"]
+ answer_valid_tags = ["1.3.0", "1.3.1", "1.4.0", "1.4.1"]
answer_invalid_tags = ["latest", "v1.0.0"]
responses.add(responses.GET, url, body=answer, status=200)
- rdh.initialize (org)
- test_tags = rdh.DockerTagClass (org, repo)
+ rdh.initialize(org)
+ test_tags = rdh.DockerTagClass(org, repo)
for tag in answer_valid_tags:
assert tag in test_tags.valid
for tag in answer_invalid_tags:
docker api.
"""
- _test_image_long_id = 'sha256:3450464d68c9443dedc8bfe3272a23e6441c37f707c42d32fee0ebdbcd319d2c'
- _test_image_short_id = 'sha256:3450464d68'
- _expected_nexus_image_str = ['nexus3.onap.org:10002/onap/base/sdc-sanity:1.4.0',
- 'nexus3.onap.org:10002/onap/base/sdc-sanity:1.4.1']
+ _test_image_long_id = "sha256:3450464d68c9443dedc8bfe3272a23e6441c37f707c42d32fee0ebdbcd319d2c"
+ _test_image_short_id = "sha256:3450464d68"
+ _expected_nexus_image_str = [
+ "nexus3.onap.org:10002/onap/base/sdc-sanity:1.4.0",
+ "nexus3.onap.org:10002/onap/base/sdc-sanity:1.4.1",
+ ]
class mock_image:
- id = ''
- short_id = ''
+ id = ""
+ short_id = ""
+
def __init__(self, id, short_id):
self.id = id
self.short_id = short_id
nbr_exc = nbr_exceptions
- def mocked_docker_pull(self, nexus_image_str, count, tag, retry_text='', progbar=False):
+ def mocked_docker_pull(self, nexus_image_str, count, tag, retry_text="", progbar=False):
"""Mocking Pull an image from Nexus."""
if not nexus_image_str in self._expected_nexus_image_str:
- raise ValueError('Wrong nexus project in pull')
- image = self.mock_image (self._test_image_long_id, self._test_image_short_id)
+ raise ValueError("Wrong nexus project in pull")
+ image = self.mock_image(self._test_image_long_id, self._test_image_short_id)
self.counter.pull = self.counter.pull + 1
if self.counter.pull > self.nbr_exc.pull:
return image
else:
- raise requests.exceptions.ConnectionError('Connection Error')
+ raise requests.exceptions.ConnectionError("Connection Error")
- def mocked_docker_tag(self, count, image, tag, retry_text='', progbar=False):
+ def mocked_docker_tag(self, count, image, tag, retry_text="", progbar=False):
"""Mocking Tag the image with proper docker name and version."""
if not image.id == self._test_image_long_id:
- raise ValueError('Wrong image id in remove')
- if not tag in ["1.4.0","1.4.1"]:
- raise ValueError('Wrong tag in docker_tag')
+ raise ValueError("Wrong image id in remove")
+ if not tag in ["1.4.0", "1.4.1"]:
+ raise ValueError("Wrong tag in docker_tag")
self.counter.tag = self.counter.tag + 1
if self.counter.tag <= self.nbr_exc.tag:
- raise requests.exceptions.ConnectionError('Connection Error')
+ raise requests.exceptions.ConnectionError("Connection Error")
    def mocked_docker_push(self, count, image, tag, retry_text, progbar=False):
        """Mocking Push the image with proper docker name and version."""
if not image.id == self._test_image_long_id:
- raise ValueError('Wrong image id in remove')
- if not tag in ["1.4.0","1.4.1"]:
- raise ValueError('Wrong tag in push')
+ raise ValueError("Wrong image id in remove")
+ if not tag in ["1.4.0", "1.4.1"]:
+ raise ValueError("Wrong tag in push")
self.counter.push = self.counter.push + 1
if self.counter.push <= self.nbr_exc.push:
- raise requests.exceptions.ConnectionError('Connection Error')
+ raise requests.exceptions.ConnectionError("Connection Error")
- def mocked_docker_cleanup(self, count, image, tag, retry_text='', progbar=False):
+    def mocked_docker_cleanup(self, count, image, tag, retry_text="", progbar=False):
        """Mocking Cleanup of the local image."""
if not image.id == self._test_image_long_id:
- raise ValueError('Wrong image id in remove')
+ raise ValueError("Wrong image id in remove")
self.counter.cleanup = self.counter.cleanup + 1
if self.counter.cleanup <= self.nbr_exc.cleanup:
- raise requests.exceptions.ConnectionError('Connection Error')
+ raise requests.exceptions.ConnectionError("Connection Error")
def test_ProjectClass_2_missing(self, responses, mocker):
"""Test ProjectClass"""
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_pull', side_effect=self.mocked_docker_pull)
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_tag', side_effect=self.mocked_docker_tag)
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_push', side_effect=self.mocked_docker_push)
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_cleanup', side_effect=self.mocked_docker_cleanup)
+ mocker.patch("lftools.nexus.release_docker_hub.ProjectClass._docker_pull", side_effect=self.mocked_docker_pull)
+ mocker.patch("lftools.nexus.release_docker_hub.ProjectClass._docker_tag", side_effect=self.mocked_docker_tag)
+ mocker.patch("lftools.nexus.release_docker_hub.ProjectClass._docker_push", side_effect=self.mocked_docker_push)
+ mocker.patch(
+ "lftools.nexus.release_docker_hub.ProjectClass._docker_cleanup", side_effect=self.mocked_docker_cleanup
+ )
- project = ('onap', 'base/sdc-sanity')
+ project = ("onap", "base/sdc-sanity")
- nexus_url = 'https://nexus3.onap.org:10002/v2/onap/base/sdc-sanity/tags/list'
+ nexus_url = "https://nexus3.onap.org:10002/v2/onap/base/sdc-sanity/tags/list"
nexus_answer = '{"name":"onap/base_sdc-sanity","tags":["1.3.0","1.3.1","1.4.0","1.4.1","v1.0.0"]}'
- docker_url = 'https://registry.hub.docker.com/v1/repositories/onap/base-sdc-sanity/tags'
+ docker_url = "https://registry.hub.docker.com/v1/repositories/onap/base-sdc-sanity/tags"
docker_answer = """[{"layer": "", "name": "1.3.0"},
{"layer": "", "name": "1.3.1"},
{"layer": "", "name": "v1.0.0"}]
"""
- nexus_answer_valid_tags = ["1.3.0","1.3.1","1.4.0","1.4.1"]
+ nexus_answer_valid_tags = ["1.3.0", "1.3.1", "1.4.0", "1.4.1"]
nexus_answer_invalid_tags = ["v1.0.0"]
- docker_answer_valid_tags = ["1.3.0","1.3.1"]
+ docker_answer_valid_tags = ["1.3.0", "1.3.1"]
docker_answer_invalid_tags = ["v1.0.0"]
- docker_missing_tags = ["1.4.0","1.4.1"]
+ docker_missing_tags = ["1.4.0", "1.4.1"]
self.counter.pull = self.counter.tag = self.counter.push = self.counter.cleanup = 0
responses.add(responses.GET, nexus_url, body=nexus_answer, status=200)
responses.add(responses.GET, docker_url, body=docker_answer, status=200)
- rdh.initialize ('onap')
- test_proj = rdh.ProjectClass (project)
+ rdh.initialize("onap")
+ test_proj = rdh.ProjectClass(project)
- assert test_proj.org_name == 'onap'
- assert test_proj.nexus_repo_name == 'base/sdc-sanity'
- assert test_proj.docker_repo_name == 'base-sdc-sanity'
- assert test_proj.calc_docker_project_name() == 'onap/base-sdc-sanity'
+ assert test_proj.org_name == "onap"
+ assert test_proj.nexus_repo_name == "base/sdc-sanity"
+ assert test_proj.docker_repo_name == "base-sdc-sanity"
+ assert test_proj.calc_docker_project_name() == "onap/base-sdc-sanity"
assert len(test_proj.nexus_tags.valid) == len(nexus_answer_valid_tags)
assert len(test_proj.docker_tags.valid) == len(docker_answer_valid_tags)
def test_ProjectClass_1_missing(self, responses, mocker):
"""Test ProjectClass"""
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_pull', side_effect=self.mocked_docker_pull)
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_tag', side_effect=self.mocked_docker_tag)
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_push', side_effect=self.mocked_docker_push)
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_cleanup', side_effect=self.mocked_docker_cleanup)
+ mocker.patch("lftools.nexus.release_docker_hub.ProjectClass._docker_pull", side_effect=self.mocked_docker_pull)
+ mocker.patch("lftools.nexus.release_docker_hub.ProjectClass._docker_tag", side_effect=self.mocked_docker_tag)
+ mocker.patch("lftools.nexus.release_docker_hub.ProjectClass._docker_push", side_effect=self.mocked_docker_push)
+ mocker.patch(
+ "lftools.nexus.release_docker_hub.ProjectClass._docker_cleanup", side_effect=self.mocked_docker_cleanup
+ )
- project = ('onap', 'base/sdc-sanity')
+ project = ("onap", "base/sdc-sanity")
- nexus_url = 'https://nexus3.onap.org:10002/v2/onap/base/sdc-sanity/tags/list'
+ nexus_url = "https://nexus3.onap.org:10002/v2/onap/base/sdc-sanity/tags/list"
nexus_answer = '{"name":"onap/base_sdc-sanity","tags":["1.3.0","1.3.1","1.4.0","v1.0.0"]}'
- docker_url = 'https://registry.hub.docker.com/v1/repositories/onap/base-sdc-sanity/tags'
+ docker_url = "https://registry.hub.docker.com/v1/repositories/onap/base-sdc-sanity/tags"
docker_answer = """[{"layer": "", "name": "1.3.0"},
{"layer": "", "name": "1.3.1"},
{"layer": "", "name": "v1.0.0"}]
"""
- nexus_answer_valid_tags = ["1.3.0","1.3.1","1.4.0"]
+ nexus_answer_valid_tags = ["1.3.0", "1.3.1", "1.4.0"]
nexus_answer_invalid_tags = ["v1.0.0"]
- docker_answer_valid_tags = ["1.3.0","1.3.1"]
+ docker_answer_valid_tags = ["1.3.0", "1.3.1"]
docker_answer_invalid_tags = ["v1.0.0"]
docker_missing_tags = ["1.4.0"]
responses.add(responses.GET, nexus_url, body=nexus_answer, status=200)
responses.add(responses.GET, docker_url, body=docker_answer, status=200)
- rdh.initialize ('onap')
- test_proj = rdh.ProjectClass (project)
+ rdh.initialize("onap")
+ test_proj = rdh.ProjectClass(project)
- assert test_proj.org_name == 'onap'
- assert test_proj.nexus_repo_name == 'base/sdc-sanity'
- assert test_proj.docker_repo_name == 'base-sdc-sanity'
- assert test_proj.calc_docker_project_name() == 'onap/base-sdc-sanity'
+ assert test_proj.org_name == "onap"
+ assert test_proj.nexus_repo_name == "base/sdc-sanity"
+ assert test_proj.docker_repo_name == "base-sdc-sanity"
+ assert test_proj.calc_docker_project_name() == "onap/base-sdc-sanity"
assert len(test_proj.nexus_tags.valid) == len(nexus_answer_valid_tags)
assert len(test_proj.docker_tags.valid) == len(docker_answer_valid_tags)
assert self.counter.push == 1
assert self.counter.cleanup == 1
- def test_ProjectClass_socket_timeout (self, responses, mocker):
+ def test_ProjectClass_socket_timeout(self, responses, mocker):
"""Test ProjectClass"""
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_pull', side_effect=self.mocked_docker_pull)
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_tag', side_effect=self.mocked_docker_tag)
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_push', side_effect=self.mocked_docker_push)
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_cleanup', side_effect=self.mocked_docker_cleanup)
-
- project = ('onap', 'base/sdc-sanity')
- nexus_url = 'https://nexus3.onap.org:10002/v2/onap/base/sdc-sanity/tags/list'
+ mocker.patch("lftools.nexus.release_docker_hub.ProjectClass._docker_pull", side_effect=self.mocked_docker_pull)
+ mocker.patch("lftools.nexus.release_docker_hub.ProjectClass._docker_tag", side_effect=self.mocked_docker_tag)
+ mocker.patch("lftools.nexus.release_docker_hub.ProjectClass._docker_push", side_effect=self.mocked_docker_push)
+ mocker.patch(
+ "lftools.nexus.release_docker_hub.ProjectClass._docker_cleanup", side_effect=self.mocked_docker_cleanup
+ )
+
+ project = ("onap", "base/sdc-sanity")
+ nexus_url = "https://nexus3.onap.org:10002/v2/onap/base/sdc-sanity/tags/list"
nexus_answer = '{"name":"onap/base_sdc-sanity","tags":["1.3.0","1.3.1","1.4.0","v1.0.0"]}'
- docker_url = 'https://registry.hub.docker.com/v1/repositories/onap/base-sdc-sanity/tags'
+ docker_url = "https://registry.hub.docker.com/v1/repositories/onap/base-sdc-sanity/tags"
docker_answer = """[{"layer": "", "name": "1.3.0"},
{"layer": "", "name": "1.3.1"},
{"layer": "", "name": "v1.0.0"}]
"""
- nexus_answer_valid_tags = ["1.3.0","1.3.1","1.4.0"]
+ nexus_answer_valid_tags = ["1.3.0", "1.3.1", "1.4.0"]
nexus_answer_invalid_tags = ["v1.0.0"]
- docker_answer_valid_tags = ["1.3.0","1.3.1"]
+ docker_answer_valid_tags = ["1.3.0", "1.3.1"]
docker_answer_invalid_tags = ["v1.0.0"]
docker_missing_tags = ["1.4.0"]
responses.add(responses.GET, nexus_url, body=nexus_answer, status=200)
responses.add(responses.GET, docker_url, body=docker_answer, status=200)
- rdh.initialize ('onap')
- test_proj = rdh.ProjectClass (project)
+ rdh.initialize("onap")
+ test_proj = rdh.ProjectClass(project)
- assert test_proj.org_name == 'onap'
- assert test_proj.nexus_repo_name == 'base/sdc-sanity'
- assert test_proj.docker_repo_name == 'base-sdc-sanity'
- assert test_proj.calc_docker_project_name() == 'onap/base-sdc-sanity'
+ assert test_proj.org_name == "onap"
+ assert test_proj.nexus_repo_name == "base/sdc-sanity"
+ assert test_proj.docker_repo_name == "base-sdc-sanity"
+ assert test_proj.calc_docker_project_name() == "onap/base-sdc-sanity"
assert len(test_proj.nexus_tags.valid) == len(nexus_answer_valid_tags)
assert len(test_proj.docker_tags.valid) == len(docker_answer_valid_tags)
assert tag in test_proj.tags_2_copy.valid
assert len(test_proj.tags_2_copy.valid) == len(docker_missing_tags)
- #Verify that 90 timeout's on any stage failes.
+        # Verify that 90 timeouts on any stage cause a failure.
self.nbr_exc.pull = 90
with pytest.raises(requests.HTTPError) as excinfo:
test_proj.docker_pull_tag_push()
with pytest.raises(requests.HTTPError) as excinfo:
test_proj.docker_pull_tag_push()
- #Verify 89 timeouts and the 90 is ok per stage
+        # Verify that 89 timeouts per stage are ok and the 90th attempt succeeds
self.counter.pull = self.counter.tag = self.counter.push = self.counter.cleanup = 0
self.nbr_exc.pull = self.nbr_exc.tag = self.nbr_exc.push = self.nbr_exc.cleanup = 89
test_proj.docker_pull_tag_push()
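The 89/90 pair above encodes the retry budget these tests assume release_docker_hub grants each stage: up to 90 attempts per stage (pull, tag, push, cleanup), swallowing ConnectionError until the budget is spent and raising HTTPError afterwards. A rough sketch of a loop with that shape, purely as an assumption (retry_stage is hypothetical; the real loop lives in lftools.nexus.release_docker_hub):

    import requests

    def retry_stage(stage_fn, attempts=90):
        """Call stage_fn until it succeeds or the attempt budget runs out."""
        for count in range(attempts):
            try:
                return stage_fn(count)
            except requests.exceptions.ConnectionError:
                # Transient failure: retry until attempts are exhausted.
                continue
        raise requests.HTTPError("stage failed after {} attempts".format(attempts))

With this shape, 89 injected failures still succeed on the 90th call, while 90 failures exhaust the loop and raise, matching both assertions above.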
class TestFetchNexus3Catalog:
- url = 'https://nexus3.onap.org:10002/v2/_catalog'
+ url = "https://nexus3.onap.org:10002/v2/_catalog"
answer = """
{"repositories":["dcae_dmaapbc","onap/aaf/aaf-base-openssl_1.1.0","onap/aaf/aaf-base-xenial","onap/aaf/aaf_agent","onap/aaf/aaf_cass","onap/aaf/aaf_cm","onap/aaf/aaf_config","onap/aaf/aaf_core","onap/aaf/aaf_fs","onap/aaf/aaf_gui","onap/aaf/aaf_hello","onap/aaf/aaf_locate","onap/aaf/aaf_oauth","onap/aaf/aaf_service","onap/aaf/abrmd","onap/aaf/distcenter","onap/aaf/sms","onap/aaf/smsquorumclient","onap/aaf/testcaservice","onap/aai-cacher","onap/aai-graphadmin","onap/aai-resources","onap/aai-traversal","onap/aai/esr-gui","onap/aai/esr-server","onap/admportal-sdnc-image","onap/appc-cdt-image","onap/appc-image","onap/babel","onap/base_sdc-cassandra","onap/base_sdc-cqlsh","onap/base_sdc-elasticsearch","onap/base_sdc-jetty","onap/base_sdc-kibana","onap/base_sdc-python","onap/base_sdc-sanity","onap/base_sdc-vnc","onap/ccsdk-ansible-server-image","onap/ccsdk-apps-ms-neng","onap/ccsdk-controllerblueprints","onap/ccsdk-dgbuilder-image","onap/ccsdk-odl-image","onap/ccsdk-odl-oxygen-image","onap/ccsdk-odlsli-image","onap/ccsdk-ubuntu-image","onap/chameleon","onap/champ","onap/clamp","onap/clamp-dashboard-kibana","onap/clamp-dashboard-logstash","onap/cli","onap/data-router","onap/dcae-be","onap/dcae-ci-tests","onap/dcae-dt","onap/dcae-fe","onap/dcae-tools","onap/dcae-tosca-app","onap/dmaap/buscontroller","onap/dmaap/datarouter-node","onap/dmaap/datarouter-prov","onap/dmaap/datarouter-subscriber","onap/dmaap/dmaap-mr","onap/dmaap/kafka01101","onap/externalapi/nbi","onap/gallifrey","onap/gizmo","onap/holmes/engine-management","onap/holmes/rule-management","onap/model-loader","onap/msb/msb_apigateway","onap/msb/msb_base","onap/msb/msb_discovery","onap/multicloud/azure","onap/multicloud/framework","onap/multicloud/openstack-newton","onap/multicloud/openstack-ocata","onap/multicloud/openstack-pike","onap/multicloud/openstack-windriver","onap/multicloud/openstack/openstack-ocata","onap/multicloud/vio","onap/multicloud/vio-vesagent","onap/music/cassandra_3_11","onap/music/cassandra_job","onap/music/cassandra_music","onap/music/music","onap/music/prom","onap/network-discovery","onap/oom/kube2msb","onap/optf-cmso-dbinit","onap/optf-cmso-service","onap/optf-has","onap/optf-osdf","onap/org.onap.dcaegen2.collectors.datafile.datafile-app-server","onap/org.onap.dcaegen2.collectors.hv-ves.hv-collector-main","onap/org.onap.dcaegen2.collectors.snmptrap","onap/org.onap.dcaegen2.collectors.ves.vescollector","onap/org.onap.dcaegen2.deployments.bootstrap","onap/org.onap.dcaegen2.deployments.cm-container","onap/org.onap.dcaegen2.deployments.healthcheck-container","onap/org.onap.dcaegen2.deployments.k8s-bootstrap-container","onap/org.onap.dcaegen2.deployments.redis-cluster-container","onap/org.onap.dcaegen2.deployments.tca-cdap-container","onap/org.onap.dcaegen2.deployments.tls-init-container","onap/org.onap.dcaegen2.platform.cdapbroker","onap/org.onap.dcaegen2.platform.configbinding","onap/org.onap.dcaegen2.platform.configbinding.app-app","onap/org.onap.dcaegen2.platform.deployment-handler","onap/org.onap.dcaegen2.platform.inventory-api","onap/org.onap.dcaegen2.platform.policy-handler","onap/org.onap.dcaegen2.platform.servicechange-handler","onap/org.onap.dcaegen2.services.prh.prh-app-server","onap/policy-apex-pdp","onap/policy-distribution","onap/policy-drools","onap/policy-pe","onap/policy/policy-db","onap/policy/policy-drools","onap/policy/policy-nexus","onap/policy/policy-pe","onap/pomba-aai-context-builder","onap/pomba-context-aggregator","onap/pomba-network-discovery-context-builder","onap/pomba-sdc-context-builder","o
nap/portal-app","onap/portal-apps","onap/portal-db","onap/portal-sdk","onap/portal-wms","onap/refrepo/postgres","onap/sdc-api-tests","onap/sdc-backend","onap/sdc-backend-init","onap/sdc-cassandra","onap/sdc-cassandra-init","onap/sdc-elasticsearch","onap/sdc-frontend","onap/sdc-init-elasticsearch","onap/sdc-kibana","onap/sdc-onboard-backend","onap/sdc-onboard-cassandra-init","onap/sdc-simulator","onap/sdc-ui-tests","onap/sdc/sdc-workflow-designer","onap/sdnc-ansible-server-image","onap/sdnc-dmaap-listener-image","onap/sdnc-image","onap/sdnc-ueb-listener-image","onap/search-data-service","onap/service-decomposition","onap/sniroemulator","onap/so/api-handler-infra","onap/so/asdc-controller","onap/so/base-image","onap/so/bpmn-infra","onap/so/catalog-db-adapter","onap/so/openstack-adapter","onap/so/request-db-adapter","onap/so/sdc-controller","onap/so/sdnc-adapter","onap/so/so-monitoring","onap/so/vfc-adapter","onap/sparky-be","onap/spike","onap/testsuite","onap/usecase-ui","onap/usecase-ui-server","onap/usecase-ui/usecase-ui-server","onap/validation","onap/vfc/catalog","onap/vfc/db","onap/vfc/emsdriver","onap/vfc/gvnfmdriver","onap/vfc/jujudriver","onap/vfc/multivimproxy","onap/vfc/nfvo/svnfm/huawei","onap/vfc/nfvo/svnfm/nokia","onap/vfc/nfvo/svnfm/nokiav2","onap/vfc/nslcm","onap/vfc/resmanagement","onap/vfc/vnflcm","onap/vfc/vnfmgr","onap/vfc/vnfres","onap/vfc/wfengine-activiti","onap/vfc/wfengine-mgrservice","onap/vfc/ztesdncdriver","onap/vfc/ztevmanagerdriver","onap/vfc/ztevnfmdriver","onap/vid","onap/vnfsdk/ice","onap/vnfsdk/refrepo","onap/vnfsdk/refrepo/postgres","onap/vnfsdk/vnftest","onap/vvp/cms","onap/vvp/engagementmgr","onap/vvp/gitlab","onap/vvp/image-scanner","onap/vvp/jenkins","onap/vvp/portal","onap/vvp/postgresql","onap/vvp/test-engine","onap/workflow-backend","onap/workflow-frontend","onap/workflow-init","openecomp/aai-cacher","openecomp/aai-resources","openecomp/aai-traversal","openecomp/appc-image","openecomp/base_sdc-backend","openecomp/base_sdc-cassandra","openecomp/base_sdc-elasticsearch","openecomp/base_sdc-frontend","openecomp/base_sdc-kibana","openecomp/base_sdc-sanity","openecomp/jacoco","openecomp/mso","openecomp/mso-arquillian","openecomp/portalapps","openecomp/portaldb","openecomp/sdc-backend","openecomp/sdc-cassandra","openecomp/sdc-elasticsearch","openecomp/sdc-frontend","openecomp/sdc-kibana","openecomp/sdc-sanity","openecomp/ubuntu-update","openecomp/vid","openecomp/wildfly"]}
"""
def test_get_all_onap(self):
rdh.NexusCatalog = []
- rdh.initialize ('onap')
+ rdh.initialize("onap")
responses.add(responses.GET, self.url, body=self.answer, status=200)
- rdh.get_nexus3_catalog ('onap')
+ rdh.get_nexus3_catalog("onap")
assert len(rdh.NexusCatalog) == 203
def test_get_all_onap_and_filter_1(self):
rdh.NexusCatalog = []
- rdh.initialize ('onap')
+ rdh.initialize("onap")
responses.add(responses.GET, self.url, body=self.answer, status=200)
- rdh.get_nexus3_catalog ('onap', 'spike')
+ rdh.get_nexus3_catalog("onap", "spike")
assert len(rdh.NexusCatalog) == 1
- assert rdh.NexusCatalog[0][0] == 'onap'
- assert rdh.NexusCatalog[0][1] == 'spike'
+ assert rdh.NexusCatalog[0][0] == "onap"
+ assert rdh.NexusCatalog[0][1] == "spike"
def test_get_all_onap_and_filter_18(self):
rdh.NexusCatalog = []
- rdh.initialize ('onap')
+ rdh.initialize("onap")
responses.add(responses.GET, self.url, body=self.answer, status=200)
- rdh.get_nexus3_catalog ('onap', 'aaf')
+ rdh.get_nexus3_catalog("onap", "aaf")
assert len(rdh.NexusCatalog) == 18
def test_get_all_onap_and_specify_1_repo_1(self):
rdh.NexusCatalog = []
- rdh.initialize ('onap')
+ rdh.initialize("onap")
responses.add(responses.GET, self.url, body=self.answer, status=200)
- rdh.get_nexus3_catalog ('onap', 'clamp', True)
+ rdh.get_nexus3_catalog("onap", "clamp", True)
assert len(rdh.NexusCatalog) == 1
- assert rdh.NexusCatalog[0][1] == 'clamp'
+ assert rdh.NexusCatalog[0][1] == "clamp"
def test_get_all_onap_and_specify_1_repo_2(self):
rdh.NexusCatalog = []
- rdh.initialize ('onap')
+ rdh.initialize("onap")
responses.add(responses.GET, self.url, body=self.answer, status=200)
- rdh.get_nexus3_catalog ('onap', 'clamp-dashboard-logstash', True)
+ rdh.get_nexus3_catalog("onap", "clamp-dashboard-logstash", True)
assert len(rdh.NexusCatalog) == 1
- assert rdh.NexusCatalog[0][1] == 'clamp-dashboard-logstash'
+ assert rdh.NexusCatalog[0][1] == "clamp-dashboard-logstash"
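Taken together, these tests pin down the filter semantics of get_nexus3_catalog: the second argument is matched as a substring of the repository name ("aaf" selects 18 repositories), and the third argument switches to exact matching, so "clamp" then selects only onap/clamp rather than every clamp-* repository. A hedged sketch of that selection logic; filter_catalog and its exact-match rule are assumptions, not the lftools implementation:

    def filter_catalog(repo_names, pattern="", exact=False):
        """Select repositories by substring, or by exact name when exact=True."""
        if not pattern:
            return list(repo_names)
        if exact:
            # Compare against the name with the leading org segment stripped.
            return [r for r in repo_names if r.split("/", 1)[-1] == pattern]
        return [r for r in repo_names if pattern in r]

Under these assumed semantics, filter_catalog(repos, "clamp") would also return the dashboard images, while filter_catalog(repos, "clamp", exact=True) keeps only the exact name, mirroring the two specify_1_repo tests.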
class TestFetchAllTagsAndUpdate:
- _test_image_long_id = 'sha256:3450464d68c9443dedc8bfe3272a23e6441c37f707c42d32fee0ebdbcd319d2c'
- _test_image_short_id = 'sha256:3450464d68'
- _expected_nexus_image_str = ['nexus3.onap.org:10002/onap/base/sdc-sanity:1.4.0',
- 'nexus3.onap.org:10002/onap/gizmo2:1.3.1',
- 'nexus3.onap.org:10002/onap/gizmo2:1.3.2'
- ]
+ _test_image_long_id = "sha256:3450464d68c9443dedc8bfe3272a23e6441c37f707c42d32fee0ebdbcd319d2c"
+ _test_image_short_id = "sha256:3450464d68"
+ _expected_nexus_image_str = [
+ "nexus3.onap.org:10002/onap/base/sdc-sanity:1.4.0",
+ "nexus3.onap.org:10002/onap/gizmo2:1.3.1",
+ "nexus3.onap.org:10002/onap/gizmo2:1.3.2",
+ ]
+
class mock_image:
- id = ''
- short_id = ''
+ id = ""
+ short_id = ""
+
def __init__(self, id, short_id):
self.id = id
self.short_id = short_id
nbr_exc = nbr_exceptions
- def mocked_docker_pull(self, nexus_image_str, count, tag, retry_text='', progbar=False):
+ def mocked_docker_pull(self, nexus_image_str, count, tag, retry_text="", progbar=False):
"""Mocking Pull an image from Nexus."""
if not nexus_image_str in self._expected_nexus_image_str:
- print ("IMAGESTR {}".format(nexus_image_str))
- raise ValueError('Wrong nexus project in pull')
- image = self.mock_image (self._test_image_long_id, self._test_image_short_id)
+ print("IMAGESTR {}".format(nexus_image_str))
+ raise ValueError("Wrong nexus project in pull")
+ image = self.mock_image(self._test_image_long_id, self._test_image_short_id)
self.counter.pull = self.counter.pull + 1
if self.counter.pull > self.nbr_exc.pull:
return image
else:
- raise requests.exceptions.ConnectionError('Connection Error')
+ raise requests.exceptions.ConnectionError("Connection Error")
- def mocked_docker_tag(self, count, image, tag, retry_text='', progbar=False):
+ def mocked_docker_tag(self, count, image, tag, retry_text="", progbar=False):
"""Mocking Tag the image with proper docker name and version."""
if not image.id == self._test_image_long_id:
- raise ValueError('Wrong image id in remove')
- if not tag in ["1.4.0","1.3.1","1.3.2"]:
- raise ValueError('Wrong tag in docker_tag')
+ raise ValueError("Wrong image id in remove")
+ if not tag in ["1.4.0", "1.3.1", "1.3.2"]:
+ raise ValueError("Wrong tag in docker_tag")
self.counter.tag = self.counter.tag + 1
if self.counter.tag <= self.nbr_exc.tag:
- raise requests.exceptions.ConnectionError('Connection Error')
+ raise requests.exceptions.ConnectionError("Connection Error")
def mocked_docker_push(self, count, image, tag, retry_text, progbar=False):
"""Mocking Tag the image with proper docker name and version."""
if not image.id == self._test_image_long_id:
- raise ValueError('Wrong image id in remove')
- if not tag in ["1.4.0","1.3.1","1.3.2"]:
- raise ValueError('Wrong tag in push')
+ raise ValueError("Wrong image id in remove")
+ if not tag in ["1.4.0", "1.3.1", "1.3.2"]:
+ raise ValueError("Wrong tag in push")
self.counter.push = self.counter.push + 1
if self.counter.push <= self.nbr_exc.push:
- raise requests.exceptions.ConnectionError('Connection Error')
+ raise requests.exceptions.ConnectionError("Connection Error")
- def mocked_docker_cleanup(self, count, image, tag, retry_text='', progbar=False):
+ def mocked_docker_cleanup(self, count, image, tag, retry_text="", progbar=False):
"""Mocking Tag the image with proper docker name and version."""
if not image.id == self._test_image_long_id:
- raise ValueError('Wrong image id in remove')
+ raise ValueError("Wrong image id in remove")
self.counter.cleanup = self.counter.cleanup + 1
if self.counter.cleanup <= self.nbr_exc.cleanup:
- raise requests.exceptions.ConnectionError('Connection Error')
-
- def initiate_test_fetch(self, responses, mocker, repo=''):
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_pull', side_effect=self.mocked_docker_pull)
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_tag', side_effect=self.mocked_docker_tag)
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_push', side_effect=self.mocked_docker_push)
- mocker.patch('lftools.nexus.release_docker_hub.ProjectClass._docker_cleanup', side_effect=self.mocked_docker_cleanup)
- url = 'https://nexus3.onap.org:10002/v2/_catalog'
+ raise requests.exceptions.ConnectionError("Connection Error")
+
+ def initiate_test_fetch(self, responses, mocker, repo=""):
+ mocker.patch("lftools.nexus.release_docker_hub.ProjectClass._docker_pull", side_effect=self.mocked_docker_pull)
+ mocker.patch("lftools.nexus.release_docker_hub.ProjectClass._docker_tag", side_effect=self.mocked_docker_tag)
+ mocker.patch("lftools.nexus.release_docker_hub.ProjectClass._docker_push", side_effect=self.mocked_docker_push)
+ mocker.patch(
+ "lftools.nexus.release_docker_hub.ProjectClass._docker_cleanup", side_effect=self.mocked_docker_cleanup
+ )
+ url = "https://nexus3.onap.org:10002/v2/_catalog"
answer = '{"repositories":["onap/base/sdc-sanity","onap/gizmo","onap/gizmo2"]}'
- nexus_url1 = 'https://nexus3.onap.org:10002/v2/onap/base/sdc-sanity/tags/list'
+ nexus_url1 = "https://nexus3.onap.org:10002/v2/onap/base/sdc-sanity/tags/list"
nexus_answer1 = '{"name":"onap/base_sdc-sanity","tags":["1.3.0","1.3.1","1.4.0","v1.0.0"]}'
- docker_url1 = 'https://registry.hub.docker.com/v1/repositories/onap/base-sdc-sanity/tags'
+ docker_url1 = "https://registry.hub.docker.com/v1/repositories/onap/base-sdc-sanity/tags"
docker_answer1 = """[{"layer": "", "name": "1.3.0"},
{"layer": "", "name": "1.3.1"},
{"layer": "", "name": "v1.0.0"}]
"""
- nexus_url2 = 'https://nexus3.onap.org:10002/v2/onap/gizmo/tags/list'
+ nexus_url2 = "https://nexus3.onap.org:10002/v2/onap/gizmo/tags/list"
nexus_answer2 = '{"name":"onap/gizmo","tags":["1.3.0"]}'
- docker_url2 = 'https://registry.hub.docker.com/v1/repositories/onap/gizmo/tags'
+ docker_url2 = "https://registry.hub.docker.com/v1/repositories/onap/gizmo/tags"
docker_answer2 = """[{"layer": "", "name": "1.3.0"}]
"""
- nexus_url3 = 'https://nexus3.onap.org:10002/v2/onap/gizmo2/tags/list'
+ nexus_url3 = "https://nexus3.onap.org:10002/v2/onap/gizmo2/tags/list"
nexus_answer3 = '{"name":"onap/gizmo2","tags":["1.3.0", "1.3.1", "1.3.2"]}'
- docker_url3 = 'https://registry.hub.docker.com/v1/repositories/onap/gizmo2/tags'
+ docker_url3 = "https://registry.hub.docker.com/v1/repositories/onap/gizmo2/tags"
docker_answer3 = """[{"layer": "", "name": "1.3.0"}]
"""
responses.add(responses.GET, url, body=answer, status=200)
self.counter.pull = self.counter.tag = self.counter.push = self.counter.cleanup = 0
def initiate_bogus_org_test_fetch(self, responses, org):
- url = 'https://nexus3.{}.org:10002/v2/_catalog'.format(org)
- exception = requests.HTTPError("Issues with URL: {} - <class 'requests.exceptions.ConnectionError'>".format(url))
+ url = "https://nexus3.{}.org:10002/v2/_catalog".format(org)
+ exception = requests.HTTPError(
+ "Issues with URL: {} - <class 'requests.exceptions.ConnectionError'>".format(url)
+ )
responses.add(responses.GET, url, body=exception)
rdh.NexusCatalog = []
rdh.projects = []
def test_fetch_all_tags(self, responses, mocker):
self.initiate_test_fetch(responses, mocker)
- rdh.initialize ('onap')
- rdh.get_nexus3_catalog ('onap')
+ rdh.initialize("onap")
+ rdh.get_nexus3_catalog("onap")
rdh.fetch_all_tags()
assert len(rdh.NexusCatalog) == 3
assert len(rdh.projects) == 3
assert len(rdh.projects[1].tags_2_copy.valid) == 0
assert len(rdh.projects[2].tags_2_copy.valid) == 2
- assert rdh.projects[0].tags_2_copy.valid[0] == '1.4.0'
- assert rdh.projects[2].tags_2_copy.valid[0] == '1.3.1'
- assert rdh.projects[2].tags_2_copy.valid[1] == '1.3.2'
+ assert rdh.projects[0].tags_2_copy.valid[0] == "1.4.0"
+ assert rdh.projects[2].tags_2_copy.valid[0] == "1.3.1"
+ assert rdh.projects[2].tags_2_copy.valid[1] == "1.3.2"
def test_fetch_from_bogus_orgs(self, responses, mocker):
- self.initiate_bogus_org_test_fetch(responses, 'bogus_org321')
- rdh.initialize ('bogus_org321')
- rdh.get_nexus3_catalog ('bogus_org321')
+ self.initiate_bogus_org_test_fetch(responses, "bogus_org321")
+ rdh.initialize("bogus_org321")
+ rdh.get_nexus3_catalog("bogus_org321")
assert len(rdh.NexusCatalog) == 0
assert len(rdh.projects) == 0
def test_copy(self, responses, mocker):
self.initiate_test_fetch(responses, mocker)
- rdh.initialize ('onap')
- rdh.get_nexus3_catalog ('onap')
+ rdh.initialize("onap")
+ rdh.get_nexus3_catalog("onap")
rdh.fetch_all_tags()
rdh.copy_from_nexus_to_docker()
assert self.counter.pull == 3
def test_start_no_copy(self, responses, mocker):
self.initiate_test_fetch(responses, mocker)
- rdh.start_point ('onap', '', False, False)
+ rdh.start_point("onap", "", False, False)
assert self.counter.pull == 0
assert self.counter.tag == 0
assert self.counter.push == 0
def test_start_copy(self, responses, mocker):
self.initiate_test_fetch(responses, mocker)
- rdh.start_point ('onap', '', False, False, False, True)
+ rdh.start_point("onap", "", False, False, False, True)
assert len(rdh.NexusCatalog) == 3
assert len(rdh.projects) == 3
assert len(rdh.projects[0].tags_2_copy.valid) == 1
assert len(rdh.projects[1].tags_2_copy.valid) == 0
assert len(rdh.projects[2].tags_2_copy.valid) == 2
- assert rdh.projects[0].tags_2_copy.valid[0] == '1.4.0'
- assert rdh.projects[2].tags_2_copy.valid[0] == '1.3.1'
- assert rdh.projects[2].tags_2_copy.valid[1] == '1.3.2'
+ assert rdh.projects[0].tags_2_copy.valid[0] == "1.4.0"
+ assert rdh.projects[2].tags_2_copy.valid[0] == "1.3.1"
+ assert rdh.projects[2].tags_2_copy.valid[1] == "1.3.2"
assert self.counter.pull == 3
assert self.counter.tag == 3
assert self.counter.push == 3
assert self.counter.cleanup == 3
def test_start_copy_repo(self, responses, mocker):
- self.initiate_test_fetch(responses, mocker, 'sanity')
- rdh.start_point ('onap', 'sanity', False, False, False, True)
+ self.initiate_test_fetch(responses, mocker, "sanity")
+ rdh.start_point("onap", "sanity", False, False, False, True)
assert len(rdh.NexusCatalog) == 1
assert len(rdh.projects) == 1
assert len(rdh.projects[0].tags_2_copy.valid) == 1
- assert rdh.projects[0].tags_2_copy.valid[0] == '1.4.0'
+ assert rdh.projects[0].tags_2_copy.valid[0] == "1.4.0"
assert self.counter.pull == 1
assert self.counter.tag == 1
assert self.counter.push == 1
assert self.counter.cleanup == 1
def test_start_bogus_orgs(self, responses):
- self.initiate_bogus_org_test_fetch(responses, 'bogus_org321')
- rdh.start_point ('bogus_org321')
+ self.initiate_bogus_org_test_fetch(responses, "bogus_org321")
+ rdh.start_point("bogus_org321")
assert len(rdh.NexusCatalog) == 0
assert len(rdh.projects) == 0
import lftools.api.endpoints.readthedocs as client
-creds = {
- 'authtype': 'token',
- 'endpoint': 'https://readthedocs.org/api/v3/',
- 'token': 'xyz'
-}
+creds = {"authtype": "token", "endpoint": "https://readthedocs.org/api/v3/", "token": "xyz"}
rtd = client.ReadTheDocs(creds=creds)
-FIXTURE_DIR = os.path.join(os.path.dirname(
- os.path.realpath(__file__)), 'fixtures',)
+FIXTURE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures",)
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, 'rtd'),)
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
@responses.activate
def test_project_list(datafiles):
os.chdir(str(datafiles))
- json_file = open('project_list.json', 'r')
+ json_file = open("project_list.json", "r")
json_data = json.loads(json_file.read())
- responses.add(responses.GET,
- url='https://readthedocs.org/api/v3/projects/',
- json=json_data, status=200)
- assert 'TestProject1' in rtd.project_list()
+ responses.add(responses.GET, url="https://readthedocs.org/api/v3/projects/", json=json_data, status=200)
+ assert "TestProject1" in rtd.project_list()
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, 'rtd'),)
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
@responses.activate
def test_project_details(datafiles):
os.chdir(str(datafiles))
- json_file = open('project_details.json', 'r')
+ json_file = open("project_details.json", "r")
json_data = json.loads(json_file.read())
- responses.add(responses.GET,
- url='https://readthedocs.org/api/v3/projects/TestProject1/',
- json=json_data, status=200)
- assert 'slug' in rtd.project_details('TestProject1')
+ responses.add(
+ responses.GET, url="https://readthedocs.org/api/v3/projects/TestProject1/", json=json_data, status=200
+ )
+ assert "slug" in rtd.project_details("TestProject1")
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, 'rtd'),)
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
@responses.activate
def test_project_version_list(datafiles):
os.chdir(str(datafiles))
- json_file = open('project_version_list.json', 'r')
+ json_file = open("project_version_list.json", "r")
json_data = json.loads(json_file.read())
- responses.add(responses.GET,
- url='https://readthedocs.org/api/v3/projects/TestProject1/versions/?active=True', # noqa
- json=json_data, status=200, match_querystring=True)
- assert 'test-trigger6' in rtd.project_version_list('TestProject1')
+ responses.add(
+ responses.GET,
+ url="https://readthedocs.org/api/v3/projects/TestProject1/versions/?active=True", # noqa
+ json=json_data,
+ status=200,
+ match_querystring=True,
+ )
+ assert "test-trigger6" in rtd.project_version_list("TestProject1")
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, 'rtd'),)
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
@responses.activate
def test_project_version_details(datafiles):
os.chdir(str(datafiles))
- json_file = open('project_version_details.json', 'r')
+ json_file = open("project_version_details.json", "r")
json_data = json.loads(json_file.read())
- responses.add(responses.GET,
- url='https://readthedocs.org/api/v3/projects/TestProject1/versions/latest/', # noqa
- json=json_data, status=200)
- assert 'slug' in rtd.project_version_details('TestProject1', 'latest')
+ responses.add(
+ responses.GET,
+ url="https://readthedocs.org/api/v3/projects/TestProject1/versions/latest/", # noqa
+ json=json_data,
+ status=200,
+ )
+ assert "slug" in rtd.project_version_details("TestProject1", "latest")
@responses.activate
def test_project_version_update():
- data = {
- 'active': True
- }
- responses.add(responses.PATCH,
- url='https://readthedocs.org/api/v3/projects/TestProject1/versions/latest/', # noqa
- json=data, status=204)
- assert rtd.project_version_update('TestProject1', 'latest',
- 'True')
+ data = {"active": True}
+ responses.add(
+ responses.PATCH,
+ url="https://readthedocs.org/api/v3/projects/TestProject1/versions/latest/", # noqa
+ json=data,
+ status=204,
+ )
+ assert rtd.project_version_update("TestProject1", "latest", "True")
@responses.activate
def test_project_create():
data = {
- 'name': 'TestProject1',
- 'repository': {
- 'url': 'https://repository_url',
- 'type': 'my_repo_type'
- },
- 'homepage': 'https://homepageurl',
- 'programming_language': 'py',
- 'language': 'en'
+ "name": "TestProject1",
+ "repository": {"url": "https://repository_url", "type": "my_repo_type"},
+ "homepage": "https://homepageurl",
+ "programming_language": "py",
+ "language": "en",
}
- responses.add(responses.POST,
- url='https://readthedocs.org/api/v3/projects/',
- json=data, status=201)
- assert rtd.project_create('TestProject1', 'https://repository_url',
- 'my_repo_type', 'https://homepageurl',
- 'py', 'en')
+ responses.add(responses.POST, url="https://readthedocs.org/api/v3/projects/", json=data, status=201)
+ assert rtd.project_create(
+ "TestProject1", "https://repository_url", "my_repo_type", "https://homepageurl", "py", "en"
+ )
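The data dict above is the payload project_create sends to the Read the Docs v3 /projects/ endpoint, and the mock answers 201 Created. For orientation, a hedged equivalent using requests directly, with the same token-style Authorization header as the creds block earlier; the token and field values are placeholders:

    import requests

    payload = {
        "name": "TestProject1",
        "repository": {"url": "https://repository_url", "type": "my_repo_type"},
        "homepage": "https://homepageurl",
        "programming_language": "py",
        "language": "en",
    }
    resp = requests.post(
        "https://readthedocs.org/api/v3/projects/",
        json=payload,
        headers={"Authorization": "Token xyz"},  # placeholder token
    )
    assert resp.status_code == 201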
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, 'rtd'),)
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
@responses.activate
def test_project_build_list(datafiles):
os.chdir(str(datafiles))
- json_file = open('project_build_list.json', 'r')
+ json_file = open("project_build_list.json", "r")
json_data = json.loads(json_file.read())
- responses.add(responses.GET,
- url='https://readthedocs.org/api/v3/projects/testproject1/builds/?running=True', # noqa
- json=json_data, status=200, match_querystring=True)
- assert 'success' in rtd.project_build_list('testproject1')
+ responses.add(
+ responses.GET,
+ url="https://readthedocs.org/api/v3/projects/testproject1/builds/?running=True", # noqa
+ json=json_data,
+ status=200,
+ match_querystring=True,
+ )
+ assert "success" in rtd.project_build_list("testproject1")
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, 'rtd'),)
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
@responses.activate
def test_project_build_details(datafiles):
os.chdir(str(datafiles))
- json_file = open('project_build_details.json', 'r')
+ json_file = open("project_build_details.json", "r")
json_data = json.loads(json_file.read())
- responses.add(responses.GET,
- url='https://readthedocs.org/api/v3/projects/testproject1/builds/9584913/', # noqa
- json=json_data, status=200)
- assert 'id' in rtd.project_build_details('testproject1', 9584913)
+ responses.add(
+ responses.GET,
+ url="https://readthedocs.org/api/v3/projects/testproject1/builds/9584913/", # noqa
+ json=json_data,
+ status=200,
+ )
+ assert "id" in rtd.project_build_details("testproject1", 9584913)
@responses.activate
def test_project_build_trigger():
- data = {
- 'project': 'testproject1',
- 'version': 'latest'
- }
- responses.add(responses.POST,
- url='https://readthedocs.org/api/v3/projects/testproject1/versions/latest/builds/', # noqa
- json=data, status=201)
- assert rtd.project_build_trigger('testproject1', 'latest')
+ data = {"project": "testproject1", "version": "latest"}
+ responses.add(
+ responses.POST,
+ url="https://readthedocs.org/api/v3/projects/testproject1/versions/latest/builds/", # noqa
+ json=data,
+ status=201,
+ )
+ assert rtd.project_build_trigger("testproject1", "latest")
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, 'rtd'),)
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
@responses.activate
def test_subproject_list(datafiles):
os.chdir(str(datafiles))
- json_file = open('subproject_list.json', 'r')
+ json_file = open("subproject_list.json", "r")
json_data = json.loads(json_file.read())
- responses.add(responses.GET,
- url='https://readthedocs.org/api/v3/projects/TestProject1/subprojects/?limit=999', # noqa
- json=json_data, status=200, match_querystring=True)
- assert 'testproject2' in rtd.subproject_list('TestProject1')
+ responses.add(
+ responses.GET,
+ url="https://readthedocs.org/api/v3/projects/TestProject1/subprojects/?limit=999", # noqa
+ json=json_data,
+ status=200,
+ match_querystring=True,
+ )
+ assert "testproject2" in rtd.subproject_list("TestProject1")
-@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, 'rtd'),)
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "rtd"),)
@responses.activate
def test_subproject_details(datafiles):
os.chdir(str(datafiles))
- json_file = open('subproject_details.json', 'r')
+ json_file = open("subproject_details.json", "r")
json_data = json.loads(json_file.read())
- responses.add(responses.GET,
- url='https://readthedocs.org/api/v3/projects/TestProject1/subprojects/testproject2/', # NOQA
- json=json_data, status=200)
- assert 'child' in rtd.subproject_details('TestProject1', 'testproject2')
+ responses.add(
+ responses.GET,
+ url="https://readthedocs.org/api/v3/projects/TestProject1/subprojects/testproject2/", # NOQA
+ json=json_data,
+ status=200,
+ )
+ assert "child" in rtd.subproject_details("TestProject1", "testproject2")
@responses.activate
def test_subproject_create():
- responses.add(responses.POST,
- url='https://readthedocs.org/api/v3/projects/TestProject1/subprojects/', # NOQA
- status=201)
- assert rtd.subproject_create('TestProject1', 'testproject2')
+ responses.add(
+ responses.POST, url="https://readthedocs.org/api/v3/projects/TestProject1/subprojects/", status=201 # NOQA
+ )
+ assert rtd.subproject_create("TestProject1", "testproject2")
def test_subproject_delete():
from lftools import cli
-FIXTURE_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'fixtures',
- )
+FIXTURE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures",)
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'schema'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "schema"),)
def test_check_license(cli_runner, datafiles):
"""Test check_schema() command."""
os.chdir(str(datafiles))
# Check that schema passes when schema and yaml are valid.
- result = cli_runner.invoke(cli.cli, ['schema', 'verify', 'release.yaml', 'schema.yaml'], obj={})
+ result = cli_runner.invoke(cli.cli, ["schema", "verify", "release.yaml", "schema.yaml"], obj={})
# noqa: B101 .
assert result.exit_code == 0
# Check that schema fails when schema and yaml are invalid.
- result = cli_runner.invoke(cli.cli, ['schema', 'verify', 'release-broken.yaml', 'schema.yaml'], obj={})
+ result = cli_runner.invoke(cli.cli, ["schema", "verify", "release-broken.yaml", "schema.yaml"], obj={})
# noqa: B101 .
assert result.exit_code == 1
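Both invocations above go through Click's test runner, and the same check can be reproduced outside the pytest fixtures with click.testing.CliRunner directly. A short sketch; the YAML paths are placeholders for files on disk:

    from click.testing import CliRunner
    from lftools import cli

    runner = CliRunner()
    # release.yaml / schema.yaml are placeholder paths to real files.
    result = runner.invoke(cli.cli, ["schema", "verify", "release.yaml", "schema.yaml"], obj={})
    print(result.exit_code)  # 0 when the YAML validates, 1 when it does not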
from lftools import cli
-FIXTURE_DIR = os.path.join(
- os.path.dirname(os.path.realpath(__file__)),
- 'fixtures',
- )
+FIXTURE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "fixtures",)
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'version_bump'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "version_bump"),)
def test_version_bump(cli_runner, datafiles):
"""Test version bump command."""
os.chdir(str(datafiles))
- cli_runner.invoke(cli.cli, ['version', 'bump', 'TestRelease'], obj={})
+ cli_runner.invoke(cli.cli, ["version", "bump", "TestRelease"], obj={})
for _file in datafiles.listdir():
- pom = str(_file) + '/pom.xml'
- expected_pom = str(_file) + '/pom.xml.expected'
+ pom = str(_file) + "/pom.xml"
+ expected_pom = str(_file) + "/pom.xml.expected"
# noqa: B101 .
assert filecmp.cmp(pom, expected_pom)
-@pytest.mark.datafiles(
- os.path.join(FIXTURE_DIR, 'version_release'),
- )
+@pytest.mark.datafiles(os.path.join(FIXTURE_DIR, "version_release"),)
def test_version_release(cli_runner, datafiles):
"""Test version release command."""
os.chdir(str(datafiles))
- cli_runner.invoke(cli.cli, ['version', 'release', 'TestRelease'], obj={})
+ cli_runner.invoke(cli.cli, ["version", "release", "TestRelease"], obj={})
for _file in datafiles.listdir():
- pom = str(_file) + '/pom.xml'
- expected_pom = str(_file) + '/pom.xml.expected'
+ pom = str(_file) + "/pom.xml"
+ expected_pom = str(_file) + "/pom.xml.expected"
# noqa: B101 .
assert filecmp.cmp(pom, expected_pom)