Merge pull request #47 from vexxhost/fix-superlint
[WIP] Update superlint version
diff --git a/.github/workflows/linters.yaml b/.github/workflows/linters.yaml
index 0d1f3ff..193eabe 100644
--- a/.github/workflows/linters.yaml
+++ b/.github/workflows/linters.yaml
@@ -6,8 +6,10 @@
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- - uses: github/super-linter@v3
+ - uses: github/super-linter@v4
env:
DEFAULT_BRANCH: main
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
+ VALIDATE_ALL_CODEBASE: true
+ VALIDATE_PYTHON_MYPY: false
+ VALIDATE_JSCPD: false
diff --git a/README.md b/README.md
index 0146874..d5cdcac 100755
--- a/README.md
+++ b/README.md
@@ -12,7 +12,7 @@
### Function Overview
-The solution backs up all volumes attached to VMs which have a pre-defined metadata set, for
+The solution backs up all volumes attached to VMs which have a predefined metadata set, for
example, `backup=yes`.
First, it gets the list of VMs which have backup metadata and the list of volumes attached to the
VMs in the given project by consuming the Openstack API (nova-api and cinder-api). Once the
diff --git a/doc/source/conf.py b/doc/source/conf.py
index e41b2c9..1c590d6 100755
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -15,35 +15,33 @@
import os
import sys
-sys.path.insert(0, os.path.abspath('../..'))
+sys.path.insert(0, os.path.abspath("../.."))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = [
- 'sphinx.ext.autodoc',
- 'openstackdocstheme',
- #'sphinx.ext.intersphinx',
-]
+extensions = ["sphinx.ext.autodoc", "openstackdocstheme"]
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# The suffix of source filenames.
-source_suffix = '.rst'
+source_suffix = ".rst"
# The master toctree document.
-master_doc = 'index'
+master_doc = "index"
# General information about the project.
-project = u'staffeln'
-copyright = u'2017, OpenStack Developers'
+project = "staffeln"
+copyright = "2017, OpenStack Developers"
# openstackdocstheme options
-openstackdocs_repo_name = 'openstack/staffeln'
-openstackdocs_bug_project = 'replace with the name of the project on Launchpad or the ID from Storyboard'
-openstackdocs_bug_tag = ''
+openstackdocs_repo_name = "openstack/staffeln"
+openstackdocs_bug_project = (
+ "replace with the name of the project on Launchpad or the ID from Storyboard"
+)
+openstackdocs_bug_tag = ""
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
@@ -53,7 +51,7 @@
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'native'
+pygments_style = "native"
# -- Options for HTML output --------------------------------------------------
@@ -62,20 +60,23 @@
# html_theme_path = ["."]
# html_theme = '_theme'
# html_static_path = ['static']
-html_theme = 'openstackdocs'
+html_theme = "openstackdocs"
# Output file base name for HTML help builder.
-htmlhelp_basename = '%sdoc' % project
+htmlhelp_basename = "%sdoc" % project
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
- ('index',
- '%s.tex' % project,
- u'%s Documentation' % project,
- u'OpenStack Developers', 'manual'),
+ (
+ "index",
+ "%s.tex" % project,
+ "%s Documentation" % project,
+ "OpenStack Developers",
+ "manual",
+ ),
]
# Example configuration for intersphinx: refer to the Python standard library.
-#intersphinx_mapping = {'http://docs.python.org/': None}
+# intersphinx_mapping = {'http://docs.python.org/': None}
diff --git a/releasenotes/source/conf.py b/releasenotes/source/conf.py
index b3a878b..65d2460 100755
--- a/releasenotes/source/conf.py
+++ b/releasenotes/source/conf.py
@@ -35,32 +35,32 @@
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
- 'openstackdocstheme',
- 'reno.sphinxext',
+ "openstackdocstheme",
+ "reno.sphinxext",
]
# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]
# The suffix of source filenames.
-source_suffix = '.rst'
+source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
-master_doc = 'index'
+master_doc = "index"
# General information about the project.
-project = u'staffeln Release Notes'
-copyright = u'2017, OpenStack Developers'
+project = "staffeln Release Notes"
+copyright = "2017, OpenStack Developers"
# openstackdocstheme options
-openstackdocs_repo_name = 'openstack/staffeln'
+openstackdocs_repo_name = "openstack/staffeln"
openstackdocs_bug_project = """replace with the name of the
project on Launchpad or the ID from Storyboard"""
-openstackdocs_bug_tag = ''
-openstackdocs_auto_name = 'False'
+openstackdocs_bug_tag = ""
+openstackdocs_auto_name = "False"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -68,9 +68,9 @@
#
# The short X.Y version.
# The full version, including alpha/beta/rc tags.
-release = ''
+release = ""
# The short X.Y version.
-version = ''
+version = ""
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@@ -102,7 +102,7 @@
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'native'
+pygments_style = "native"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
@@ -115,7 +115,7 @@
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
-html_theme = 'openstackdocs'
+html_theme = "openstackdocs"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
@@ -144,7 +144,7 @@
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
@@ -189,7 +189,7 @@
# html_file_suffix = None
# Output file base name for HTML help builder.
-htmlhelp_basename = 'staffelnReleaseNotesdoc'
+htmlhelp_basename = "staffelnReleaseNotesdoc"
# -- Options for LaTeX output ---------------------------------------------
@@ -197,10 +197,8 @@
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
-
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
-
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
@@ -209,9 +207,13 @@
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
- ('index', 'staffelnReleaseNotes.tex',
- u'staffeln Release Notes Documentation',
- u'OpenStack Foundation', 'manual'),
+ (
+ "index",
+ "staffelnReleaseNotes.tex",
+ "staffeln Release Notes Documentation",
+ "OpenStack Foundation",
+ "manual",
+ ),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -240,9 +242,13 @@
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
- ('index', 'staffelnrereleasenotes',
- u'staffeln Release Notes Documentation',
- [u'OpenStack Foundation'], 1)
+ (
+ "index",
+ "staffelnrereleasenotes",
+ "staffeln Release Notes Documentation",
+ ["OpenStack Foundation"],
+ 1,
+ )
]
# If true, show URL addresses after external links.
@@ -255,11 +261,15 @@
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
- ('index', 'staffeln ReleaseNotes',
- u'staffeln Release Notes Documentation',
- u'OpenStack Foundation', 'staffelnReleaseNotes',
- 'One line description of project.',
- 'Miscellaneous'),
+ (
+ "index",
+ "staffeln ReleaseNotes",
+ "staffeln Release Notes Documentation",
+ "OpenStack Foundation",
+ "staffelnReleaseNotes",
+ "One line description of project.",
+ "Miscellaneous",
+ ),
]
# Documents to append as an appendix to all manuals.
@@ -275,4 +285,4 @@
# texinfo_no_detailmenu = False
# -- Options for Internationalization output ------------------------------
-locale_dirs = ['locale/']
+locale_dirs = ["locale/"]
diff --git a/setup.py b/setup.py
index 1f988cd..0346ed3 100755
--- a/setup.py
+++ b/setup.py
@@ -16,6 +16,4 @@
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
-setuptools.setup(
- setup_requires=['pbr'],
- pbr=True)
+setuptools.setup(setup_requires=["pbr"], pbr=True)
diff --git a/staffeln/__init__.py b/staffeln/__init__.py
index 686f74c..5612b0d 100755
--- a/staffeln/__init__.py
+++ b/staffeln/__init__.py
@@ -14,6 +14,4 @@
import pbr.version
-
-__version__ = pbr.version.VersionInfo(
- 'staffeln').version_string()
+__version__ = pbr.version.VersionInfo("staffeln").version_string()
diff --git a/staffeln/api/app.py b/staffeln/api/app.py
index 9552746..87d2684 100755
--- a/staffeln/api/app.py
+++ b/staffeln/api/app.py
@@ -1,10 +1,7 @@
-from flask import Flask
-from flask import Response
-from flask import request
+from flask import Flask, Response, request
+from oslo_log import log
from staffeln import objects
from staffeln.common import context
-from oslo_log import log
-
ctx = context.make_context()
app = Flask(__name__)
@@ -14,7 +11,7 @@
@app.route("/v1/backup", methods=["POST"])
def backup_id():
-
+
if "backup_id" not in request.args:
# Return error if the backup_id argument is not provided.
return Response(
@@ -22,7 +19,9 @@
)
# Retrive the backup object from backup_data table with matching backup_id.
- backup = objects.Volume.get_backup_by_backup_id(ctx, request.args["backup_id"])
+ backup = objects.Volume.get_backup_by_backup_id( # pylint: disable=E1120
+ context=ctx, backup_id=request.args["backup_id"]
+ )
# backup_info is None when there is no entry of the backup id in backup_table.
# So the backup should not be the automated backup.
if backup is None:
diff --git a/staffeln/api/middleware/parsable_error.py b/staffeln/api/middleware/parsable_error.py
index d7ce289..2b49f83 100755
--- a/staffeln/api/middleware/parsable_error.py
+++ b/staffeln/api/middleware/parsable_error.py
@@ -19,12 +19,12 @@
"""
from oslo_serialization import jsonutils
-
from staffeln.i18n import _
class ParsableErrorMiddleware(object):
"""Replace error body with something the client can parse."""
+
def __init__(self, app):
self.app = app
@@ -33,33 +33,36 @@
for err_str in app_iter:
err = {}
try:
- err = jsonutils.loads(err_str.decode('utf-8'))
+ err = jsonutils.loads(err_str.decode("utf-8"))
except ValueError:
pass
- if 'title' in err and 'description' in err:
- title = err['title']
- desc = err['description']
- elif 'faultstring' in err:
- title = err['faultstring'].split('.', 1)[0]
- desc = err['faultstring']
+ if "title" in err and "description" in err:
+ title = err["title"]
+ desc = err["description"]
+ elif "faultstring" in err:
+ title = err["faultstring"].split(".", 1)[0]
+ desc = err["faultstring"]
else:
- title = ''
- desc = ''
+ title = ""
+ desc = ""
- code = err['faultcode'].lower() if 'faultcode' in err else ''
+ code = err["faultcode"].lower() if "faultcode" in err else ""
# if already formatted by custom exception, don't update
- if 'min_version' in err:
+ if "min_version" in err:
errs.append(err)
else:
- errs.append({
- 'request_id': '',
- 'code': code,
- 'status': status_code,
- 'title': title,
- 'detail': desc,
- 'links': []})
+ errs.append(
+ {
+ "request_id": "",
+ "code": code,
+ "status": status_code,
+ "title": title,
+ "detail": desc,
+ "links": [],
+ }
+ )
return errs
@@ -71,33 +74,35 @@
def replacement_start_response(status, headers, exc_info=None):
"""Overrides the default response to make errors parsable."""
try:
- status_code = int(status.split(' ')[0])
- state['status_code'] = status_code
+ status_code = int(status.split(" ")[0])
+ state["status_code"] = status_code
except (ValueError, TypeError): # pragma: nocover
- raise Exception(_(
- 'ErrorDocumentMiddleware received an invalid '
- 'status %s') % status)
+ raise Exception(
+ _("ErrorDocumentMiddleware received an invalid " "status %s")
+ % status
+ )
else:
- if (state['status_code'] // 100) not in (2, 3):
+ if (state["status_code"] // 100) not in (2, 3):
# Remove some headers so we can replace them later
# when we have the full error message and can
# compute the length.
- headers = [(h, v)
- for (h, v) in headers
- if h not in ('Content-Length', 'Content-Type')
- ]
+ headers = [
+ (h, v)
+ for (h, v) in headers
+ if h not in ("Content-Length", "Content-Type")
+ ]
# Save the headers in case we need to modify them.
- state['headers'] = headers
+ state["headers"] = headers
return start_response(status, headers, exc_info)
app_iter = self.app(environ, replacement_start_response)
- if (state['status_code'] // 100) not in (2, 3):
- errs = self._update_errors(app_iter, state['status_code'])
- body = [jsonutils.dump_as_bytes({'errors': errs})]
- state['headers'].append(('Content-Type', 'application/json'))
- state['headers'].append(('Content-Length', str(len(body[0]))))
+ if (state["status_code"] // 100) not in (2, 3):
+ errs = self._update_errors(app_iter, state["status_code"])
+ body = [jsonutils.dump_as_bytes({"errors": errs})]
+ state["headers"].append(("Content-Type", "application/json"))
+ state["headers"].append(("Content-Length", str(len(body[0]))))
else:
body = app_iter
diff --git a/staffeln/api/wsgi.py b/staffeln/api/wsgi.py
index 55e080f..bef4092 100755
--- a/staffeln/api/wsgi.py
+++ b/staffeln/api/wsgi.py
@@ -1,2 +1,4 @@
+import app
+
if __name__ == "__main__":
app.run(host="0.0.0.0", port=8080)
diff --git a/staffeln/cmd/api.py b/staffeln/cmd/api.py
index f554354..a46656c 100755
--- a/staffeln/cmd/api.py
+++ b/staffeln/cmd/api.py
@@ -2,13 +2,11 @@
import os
import sys
+import staffeln.conf
from oslo_log import log as logging
-
from staffeln.api import app as api_app
from staffeln.common import service
-import staffeln.conf
from staffeln.i18n import _
-from staffeln import version
CONF = staffeln.conf.CONF
@@ -21,12 +19,10 @@
key_file = CONF.api.ssl_key_file
if cert_file and not os.path.exists(cert_file):
- raise RuntimeError(
- _("Unable to find cert_file : %s") % cert_file)
+ raise RuntimeError(_("Unable to find cert_file : %s") % cert_file)
if key_file and not os.path.exists(key_file):
- raise RuntimeError(
- _("Unable to find key_file : %s") % key_file)
+ raise RuntimeError(_("Unable to find key_file : %s") % key_file)
return cert_file, key_file
else:
@@ -42,11 +38,13 @@
# Create the WSGI server and start it
host, port = CONF.api.host, CONF.api.port
- LOG.info('Starting server in PID %s', os.getpid())
+ LOG.info("Starting server in PID %s", os.getpid())
LOG.debug("Configuration:")
CONF.log_opt_values(LOG, logging.DEBUG)
- LOG.info('Serving on %(proto)s://%(host)s:%(port)s',
- dict(proto="https" if use_ssl else "http", host=host, port=port))
+ LOG.info(
+ "Serving on %(proto)s://%(host)s:%(port)s",
+ dict(proto="https" if use_ssl else "http", host=host, port=port),
+ )
api_app.run(host=host, port=port, ssl_context=_get_ssl_configs(use_ssl))
diff --git a/staffeln/cmd/conductor.py b/staffeln/cmd/conductor.py
index 2550d88..f4c9579 100755
--- a/staffeln/cmd/conductor.py
+++ b/staffeln/cmd/conductor.py
@@ -1,11 +1,10 @@
"""Starter script for the staffeln conductor service."""
import cotyledon
+import staffeln.conf
from cotyledon import oslo_config_glue
-
from staffeln.common import service
from staffeln.conductor import manager
-import staffeln.conf
CONF = staffeln.conf.CONF
@@ -14,9 +13,9 @@
service.prepare_service()
sm = cotyledon.ServiceManager()
- sm.add(manager.BackupManager,
- workers=CONF.conductor.backup_workers, args=(CONF,))
- sm.add(manager.RotationManager,
- workers=CONF.conductor.rotation_workers, args=(CONF,))
+ sm.add(manager.BackupManager, workers=CONF.conductor.backup_workers, args=(CONF,))
+ sm.add(
+ manager.RotationManager, workers=CONF.conductor.rotation_workers, args=(CONF,)
+ )
oslo_config_glue.setup(sm, CONF)
sm.run()
diff --git a/staffeln/cmd/dbmanage.py b/staffeln/cmd/dbmanage.py
index 89b69ba..2331261 100644
--- a/staffeln/cmd/dbmanage.py
+++ b/staffeln/cmd/dbmanage.py
@@ -5,17 +5,14 @@
import sys
from oslo_config import cfg
-
-from staffeln.common import service
from staffeln import conf
+from staffeln.common import service
from staffeln.db import migration
-
CONF = conf.CONF
class DBCommand(object):
-
@staticmethod
def create_schema():
migration.create_schema()
@@ -23,16 +20,13 @@
def add_command_parsers(subparsers):
- parser = subparsers.add_parser(
- 'create_schema',
- help="Create the database schema.")
+ parser = subparsers.add_parser("create_schema", help="Create the database schema.")
parser.set_defaults(func=DBCommand.create_schema)
-command_opt = cfg.SubCommandOpt('command',
- title='Command',
- help='Available commands',
- handler=add_command_parsers)
+command_opt = cfg.SubCommandOpt(
+ "command", title="Command", help="Available commands", handler=add_command_parsers
+)
def register_sub_command_opts():
@@ -42,11 +36,13 @@
def main():
register_sub_command_opts()
- valid_commands = set([
- 'create_schema',
- ])
+ valid_commands = set(
+ [
+ "create_schema",
+ ]
+ )
if not set(sys.argv).intersection(valid_commands):
- sys.argv.append('create_schema')
+ sys.argv.append("create_schema")
service.prepare_service(sys.argv)
CONF.command.func()
diff --git a/staffeln/common/config.py b/staffeln/common/config.py
index dd28201..f71a378 100755
--- a/staffeln/common/config.py
+++ b/staffeln/common/config.py
@@ -7,10 +7,12 @@
def parse_args(argv, default_config_files=None):
# rpc.set_defaults(control_exchange='staffeln')
- CONF(argv[1:],
- project='staffeln',
- version=version.version_info.release_string(),
- default_config_files=default_config_files)
+ CONF(
+ argv[1:],
+ project="staffeln",
+ version=version.version_info.release_string(),
+ default_config_files=default_config_files,
+ )
# rpc.init(CONF)
diff --git a/staffeln/common/constants.py b/staffeln/common/constants.py
index 91b2a95..6b60ef5 100644
--- a/staffeln/common/constants.py
+++ b/staffeln/common/constants.py
@@ -1,9 +1,9 @@
-BACKUP_COMPLETED=2
-BACKUP_WIP=1
-BACKUP_PLANNED=0
+BACKUP_COMPLETED = 2
+BACKUP_WIP = 1
+BACKUP_PLANNED = 0
-BACKUP_ENABLED_KEY = 'true'
-BACKUP_RESULT_CHECK_INTERVAL = 60 # second
+BACKUP_ENABLED_KEY = "true"
+BACKUP_RESULT_CHECK_INTERVAL = 60 # second
# default config values
-DEFAULT_BACKUP_CYCLE_TIMEOUT="5min"
\ No newline at end of file
+DEFAULT_BACKUP_CYCLE_TIMEOUT = "5min"
diff --git a/staffeln/common/context.py b/staffeln/common/context.py
index a223558..c6046e1 100644
--- a/staffeln/common/context.py
+++ b/staffeln/common/context.py
@@ -1,6 +1,5 @@
from oslo_context import context
from oslo_log import log
-from oslo_utils import timeutils
LOG = log.getLogger(__name__)
diff --git a/staffeln/common/email.py b/staffeln/common/email.py
index d88a96d..693feb5 100644
--- a/staffeln/common/email.py
+++ b/staffeln/common/email.py
@@ -1,20 +1,20 @@
# Email notification package
# This should be upgraded by integrating with mail server to send batch
import smtplib
-from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
__DRY_RUN__ = False
def send(
- src_email,
- src_pwd,
- dest_email,
- subject,
- content,
- smtp_server_domain,
- smtp_server_port,
+ src_email,
+ src_pwd,
+ dest_email,
+ subject,
+ content,
+ smtp_server_domain,
+ smtp_server_port,
):
message = MIMEMultipart("alternative")
message["Subject"] = subject
diff --git a/staffeln/common/openstack.py b/staffeln/common/openstack.py
index 7aaf1a0..01e5ff5 100644
--- a/staffeln/common/openstack.py
+++ b/staffeln/common/openstack.py
@@ -1,5 +1,4 @@
-from openstack import exceptions
-from openstack import proxy
+from openstack import exceptions, proxy
from oslo_log import log
from staffeln.common import auth
from staffeln.i18n import _
@@ -7,25 +6,22 @@
LOG = log.getLogger(__name__)
-class OpenstackSDK():
-
+class OpenstackSDK:
def __init__(self):
self.conn_list = {}
self.conn = auth.create_connection()
-
def set_project(self, project):
- LOG.debug(_("Connect as project %s" % project.get('name')))
- project_id = project.get('id')
+ LOG.debug(_("Connect as project %s" % project.get("name")))
+ project_id = project.get("id")
if project_id not in self.conn_list:
- LOG.debug(_("Initiate connection for project %s" % project.get('name')))
+ LOG.debug(_("Initiate connection for project %s" % project.get("name")))
conn = self.conn.connect_as_project(project)
self.conn_list[project_id] = conn
- LOG.debug(_("Connect as project %s" % project.get('name')))
+ LOG.debug(_("Connect as project %s" % project.get("name")))
self.conn = self.conn_list[project_id]
-
# user
def get_user_id(self):
user_name = self.conn.config.auth["username"]
@@ -39,24 +35,17 @@
user = self.conn.get_user(name_or_id=user_name)
return user.id
- ############## project
def get_projects(self):
return self.conn.list_projects()
-
- ############## server
def get_servers(self, project_id, all_projects=True, details=True):
return self.conn.compute.servers(
details=details, all_projects=all_projects, project_id=project_id
)
-
- ############## volume
def get_volume(self, uuid, project_id):
return self.conn.get_volume_by_id(uuid)
-
- ############## backup
def get_backup(self, uuid, project_id=None):
# return conn.block_storage.get_backup(
# project_id=project_id, backup_id=uuid,
@@ -67,16 +56,16 @@
except exceptions.ResourceNotFound:
return None
-
def create_backup(self, volume_id, project_id, force=True, wait=False):
# return conn.block_storage.create_backup(
# volume_id=queue.volume_id, force=True, project_id=queue.project_id,
# )
return self.conn.create_volume_backup(
- volume_id=volume_id, force=force, wait=wait,
+ volume_id=volume_id,
+ force=force,
+ wait=wait,
)
-
def delete_backup(self, uuid, project_id=None, force=False):
# Note(Alex): v3 is not supporting force delete?
# conn.block_storage.delete_backup(
@@ -88,18 +77,16 @@
except exceptions.ResourceNotFound:
return None
-
def get_backup_quota(self, project_id):
# quota = conn.get_volume_quotas(project_id)
quota = self._get_volume_quotas(project_id)
return quota.backups
-
# rewrite openstasdk._block_storage.get_volume_quotas
# added usage flag
# ref: https://docs.openstack.org/api-ref/block-storage/v3/?expanded=#show-quota-usage-for-a-project
def _get_volume_quotas(self, project_id, usage=True):
- """ Get volume quotas for a project
+ """Get volume quotas for a project
:param name_or_id: project name or id
:raises: OpenStackCloudException if it's not a valid project
@@ -109,12 +96,11 @@
if usage:
resp = self.conn.block_storage.get(
- '/os-quota-sets/{project_id}?usage=True'.format(project_id=project_id))
+ "/os-quota-sets/{project_id}?usage=True".format(project_id=project_id)
+ )
else:
resp = self.conn.block_storage.get(
- '/os-quota-sets/{project_id}'.format(project_id=project_id))
- data = proxy._json_response(
- resp,
- error_message="cinder client call failed")
- return self.conn._get_and_munchify('quota_set', data)
-
+ "/os-quota-sets/{project_id}".format(project_id=project_id)
+ )
+ data = proxy._json_response(resp, error_message="cinder client call failed")
+ return self.conn._get_and_munchify("quota_set", data)
diff --git a/staffeln/common/service.py b/staffeln/common/service.py
index 791955b..d2ad7a5 100755
--- a/staffeln/common/service.py
+++ b/staffeln/common/service.py
@@ -12,11 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from oslo_log import log as logging
-
-from staffeln.common import config
-from staffeln import objects
import staffeln.conf
+from oslo_log import log as logging
+from staffeln import objects
+from staffeln.common import config
CONF = staffeln.conf.CONF
@@ -28,4 +27,4 @@
config.parse_args(argv)
config.set_config_defaults()
objects.register_all()
- logging.setup(CONF, 'staffeln')
+ logging.setup(CONF, "staffeln")
diff --git a/staffeln/common/short_id.py b/staffeln/common/short_id.py
index be719c9..18be04c 100755
--- a/staffeln/common/short_id.py
+++ b/staffeln/common/short_id.py
@@ -6,7 +6,6 @@
import uuid
import six
-
from staffeln.i18n import _
@@ -17,9 +16,10 @@
required.
"""
shifts = six.moves.xrange(num_bits - 8, -8, -8)
- byte_at = lambda off: ((value >> off # noqa: E731
- if off >= 0 else value << -off) & 0xff)
- return ''.join(chr(byte_at(offset)) for offset in shifts)
+ byte_at = lambda off: ( # noqa: E731
+ (value >> off if off >= 0 else value << -off) & 0xFF
+ )
+ return "".join(chr(byte_at(offset)) for offset in shifts)
def get_id(source_uuid):
@@ -30,7 +30,7 @@
if isinstance(source_uuid, six.string_types):
source_uuid = uuid.UUID(source_uuid)
if source_uuid.version != 4:
- raise ValueError(_('Invalid UUID version (%d)') % source_uuid.version)
+ raise ValueError(_("Invalid UUID version (%d)") % source_uuid.version)
# The "time" field of a v4 UUID contains 60 random bits
# (see RFC4122, Section 4.4)
@@ -39,7 +39,7 @@
encoded = base64.b32encode(six.b(random_bytes))[:12]
if six.PY3:
- return encoded.lower().decode('utf-8')
+ return encoded.lower().decode("utf-8")
else:
return encoded.lower()
diff --git a/staffeln/common/time.py b/staffeln/common/time.py
index 02e40e1..45e6ffe 100644
--- a/staffeln/common/time.py
+++ b/staffeln/common/time.py
@@ -1,11 +1,13 @@
import re
from datetime import datetime
+
from dateutil.relativedelta import relativedelta
DEFAULT_TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
regex = re.compile(
- r'((?P<years>\d+?)y)?((?P<months>\d+?)mon)?((?P<weeks>\d+?)w)?((?P<days>\d+?)d)?((?P<hours>\d+?)h)?((?P<minutes>\d+?)min)?((?P<seconds>\d+?)s)?'
+ r"((?P<years>\d+?)y)?((?P<months>\d+?)mon)?((?P<weeks>\d+?)w)?((?P<days>\d+?)d)?"
+ r"((?P<hours>\d+?)h)?((?P<minutes>\d+?)min)?((?P<seconds>\d+?)s)?"
)
@@ -26,34 +28,33 @@
empty_flag = False
else:
time_params[key] = 0
- if empty_flag: return None
+ if empty_flag:
+ return None
return time_params
- except:
+ except: # noqa: E722
return None
+
def get_current_time():
return datetime.now()
+
def get_current_strtime():
now = datetime.now()
return now.strftime(DEFAULT_TIME_FORMAT)
-def timeago(years=0, months=0, weeks=0, days=0, hours=0, minutes=0, seconds=0, from_date=None):
+def timeago(
+ years=0, months=0, weeks=0, days=0, hours=0, minutes=0, seconds=0, from_date=None
+):
if from_date is None:
from_date = datetime.now()
- return from_date - relativedelta(years=years, months=months,
- weeks=weeks, days=days, hours=hours,
- minutes=minutes, seconds=seconds)
-
-## yearsago using Standard library
-# def yearsago(years, from_date=None):
-# if from_date is None:
-# from_date = datetime.now()
-# try:
-# return from_date.replace(year=from_date.year - years)
-# except ValueError:
-# # Must be 2/29!
-# assert from_date.month == 2 and from_date.day == 29 # can be removed
-# return from_date.replace(month=2, day=28,
-# year=from_date.year-years)
+ return from_date - relativedelta(
+ years=years,
+ months=months,
+ weeks=weeks,
+ days=days,
+ hours=hours,
+ minutes=minutes,
+ seconds=seconds,
+ )
diff --git a/staffeln/conductor/backup.py b/staffeln/conductor/backup.py
index c79c5d9..9d3ee0e 100755
--- a/staffeln/conductor/backup.py
+++ b/staffeln/conductor/backup.py
@@ -1,31 +1,33 @@
+import collections
+
import parse
import staffeln.conf
-import collections
-from staffeln.common import constants
-from staffeln.conductor import result
+from openstack.exceptions import HttpException as OpenstackHttpException
from openstack.exceptions import ResourceNotFound as OpenstackResourceNotFound
from openstack.exceptions import SDKException as OpenstackSDKException
-from openstack.exceptions import HttpException as OpenstackHttpException
from oslo_log import log
-from staffeln.common import context
from staffeln import objects
+from staffeln.common import constants, context, openstack
+from staffeln.conductor import result
from staffeln.i18n import _
-from staffeln.common import openstack
CONF = staffeln.conf.CONF
LOG = log.getLogger(__name__)
BackupMapping = collections.namedtuple(
- "BackupMapping", ["volume_id", "backup_id", "project_id", "instance_id", "backup_completed"]
+ "BackupMapping",
+ ["volume_id", "backup_id", "project_id", "instance_id", "backup_completed"],
)
QueueMapping = collections.namedtuple(
- "QueueMapping", ["volume_id", "backup_id", "project_id", "instance_id", "backup_status"]
+ "QueueMapping",
+ ["volume_id", "backup_id", "project_id", "instance_id", "backup_status"],
)
def retry_auth(func):
"""Decorator to reconnect openstack and avoid token rotation"""
+
def wrapper(self, *args, **kwargs):
try:
return func(self, *args, **kwargs)
@@ -34,6 +36,7 @@
LOG.warn(_("Token has been expired or rotated!"))
self.refresh_openstacksdk()
return func(self, *args, **kwargs)
+
return wrapper
@@ -56,14 +59,18 @@
self.result.initialize()
def get_backups(self, filters=None):
- return objects.Volume.list(self.ctx, filters=filters)
+ return objects.Volume.list( # pylint: disable=E1120
+ context=self.ctx, filters=filters
+ )
def get_backup_quota(self, project_id):
return self.openstacksdk.get_backup_quota(project_id)
def get_queues(self, filters=None):
"""Get the list of volume queue columns from the queue_data table"""
- queues = objects.Queue.list(self.ctx, filters=filters)
+ queues = objects.Queue.list( # pylint: disable=E1120
+ context=self.ctx, filters=filters
+ )
return queues
def create_queue(self, old_tasks):
@@ -77,16 +84,19 @@
# 2. add new tasks in the queue which are not existing in the old task list
queue_list = self.check_instance_volumes()
for queue in queue_list:
- if not queue.volume_id in old_task_volume_list:
+ if queue.volume_id not in old_task_volume_list:
self._volume_queue(queue)
# Backup the volumes attached to which has a specific metadata
def filter_by_server_metadata(self, metadata):
if CONF.conductor.backup_metadata_key is not None:
- if not CONF.conductor.backup_metadata_key in metadata:
+ if CONF.conductor.backup_metadata_key not in metadata:
return False
- return metadata[CONF.conductor.backup_metadata_key].lower() == constants.BACKUP_ENABLED_KEY
+ return (
+ metadata[CONF.conductor.backup_metadata_key].lower()
+ == constants.BACKUP_ENABLED_KEY
+ )
else:
return True
@@ -94,10 +104,14 @@
def filter_by_volume_status(self, volume_id, project_id):
try:
volume = self.openstacksdk.get_volume(volume_id, project_id)
- if volume == None: return False
- res = volume['status'] in ("available", "in-use")
+ if volume is None:
+ return False
+ res = volume["status"] in ("available", "in-use")
if not res:
- reason = _("Volume %s is not backed because it is in %s status" % (volume_id, volume['status']))
+ reason = _(
+ "Volume %s is not backed because it is in %s status"
+ % (volume_id, volume["status"])
+ )
LOG.info(reason)
self.result.add_failed_backup(project_id, volume_id, reason)
return res
@@ -112,17 +126,18 @@
reason = _("Cancel backup %s because of timeout." % task.backup_id)
LOG.info(reason)
- if project_id not in self.project_list: self.process_non_existing_backup(task)
+ if project_id not in self.project_list:
+ self.process_non_existing_backup(task)
self.openstacksdk.set_project(self.project_list[project_id])
backup = self.openstacksdk.get_backup(task.backup_id)
- if backup == None: return task.delete_queue()
+ if backup is None:
+ return task.delete_queue()
self.openstacksdk.delete_backup(task.backup_id, force=True)
task.delete_queue()
self.result.add_failed_backup(task.project_id, task.volume_id, reason)
except OpenstackSDKException as e:
- reason = _("Backup %s deletion failed."
- "%s" % (task.backup_id, str(e)))
+ reason = _("Backup %s deletion failed." "%s" % (task.backup_id, str(e)))
LOG.info(reason)
# remove from the queue table
task.delete_queue()
@@ -132,9 +147,14 @@
def soft_remove_backup_task(self, backup_object):
try:
backup = self.openstacksdk.get_backup(backup_object.backup_id)
- if backup == None:
- LOG.info(_("Backup %s is not existing in Openstack."
- "Or cinder-backup is not existing in the cloud." % backup_object.backup_id))
+ if backup is None:
+ LOG.info(
+ _(
+ "Backup %s is not existing in Openstack. "
+ "Or cinder-backup is not existing in the cloud."
+ % backup_object.backup_id
+ )
+ )
return backup_object.delete_backup()
if backup["status"] in ("available"):
self.openstacksdk.delete_backup(backup_object.backup_id)
@@ -146,13 +166,17 @@
# backup table so user can delete it on Horizon.
backup_object.delete_backup()
else: # "deleting", "restoring"
- LOG.info(_("Rotation for the backup %s is skipped in this cycle "
- "because it is in %s status") % (backup_object.backup_id, backup["status"]))
+ LOG.info(
+ _(
+ "Rotation for the backup %s is skipped in this cycle "
+ "because it is in %s status"
+ )
+ % (backup_object.backup_id, backup["status"])
+ )
except OpenstackSDKException as e:
LOG.info(
- _("Backup %s deletion failed." "%s" % (backup_object.backup_id,
- str(e)))
+ _("Backup %s deletion failed. %s" % (backup_object.backup_id, str(e)))
)
# TODO(Alex): Add it into the notification queue
# remove from the backup table
@@ -167,11 +191,17 @@
backup_object.delete_backup()
self.openstacksdk.set_project(self.project_list[project_id])
- backup = self.openstacksdk.get_backup(uuid=backup_object.backup_id,
- project_id=project_id)
- if backup == None:
- LOG.info(_("Backup %s is not existing in Openstack."
- "Or cinder-backup is not existing in the cloud." % backup_object.backup_id))
+ backup = self.openstacksdk.get_backup(
+ uuid=backup_object.backup_id, project_id=project_id
+ )
+ if backup is None:
+ LOG.info(
+ _(
+ "Backup %s is not existing in Openstack. "
+ "Or cinder-backup is not existing in the cloud."
+ % backup_object.backup_id
+ )
+ )
return backup_object.delete_backup()
self.openstacksdk.delete_backup(uuid=backup_object.backup_id)
@@ -179,8 +209,7 @@
except OpenstackSDKException as e:
LOG.info(
- _("Backup %s deletion failed." "%s" % (backup_object.backup_id,
- str(e)))
+ _("Backup %s deletion failed. %s" % (backup_object.backup_id, str(e)))
)
# TODO(Alex): Add it into the notification queue
@@ -206,16 +235,22 @@
try:
servers = self.openstacksdk.get_servers(project_id=project.id)
except OpenstackHttpException as ex:
- LOG.warn(_("Failed to list servers in project %s. %s"
- % (project.id, str(ex))))
+ LOG.warn(
+ _(
+ "Failed to list servers in project %s. %s"
+ % (project.id, str(ex))
+ )
+ )
continue
for server in servers:
- if not self.filter_by_server_metadata(server.metadata): continue
+ if not self.filter_by_server_metadata(server.metadata):
+ continue
if empty_project:
empty_project = False
self.result.add_project(project.id, project.name)
for volume in server.attached_volumes:
- if not self.filter_by_volume_status(volume["id"], project.id): continue
+ if not self.filter_by_volume_status(volume["id"], project.id):
+ continue
queues_map.append(
QueueMapping(
project_id=project.id,
@@ -250,21 +285,29 @@
try:
# NOTE(Alex): no need to wait because we have a cycle time out
if project_id not in self.project_list:
- LOG.warn(_("Project ID %s is not existing in project list"
- % project_id))
+ LOG.warn(
+ _("Project ID %s is not existing in project list" % project_id)
+ )
self.process_non_existing_backup(queue)
return
self.openstacksdk.set_project(self.project_list[project_id])
- LOG.info(_("Backup for volume %s creating in project %s"
- % (queue.volume_id, project_id)))
- volume_backup = self.openstacksdk.create_backup(volume_id=queue.volume_id,
- project_id=project_id)
+ LOG.info(
+ _(
+ "Backup for volume %s creating in project %s"
+ % (queue.volume_id, project_id)
+ )
+ )
+ volume_backup = self.openstacksdk.create_backup(
+ volume_id=queue.volume_id, project_id=project_id
+ )
queue.backup_id = volume_backup.id
queue.backup_status = constants.BACKUP_WIP
queue.save()
except OpenstackSDKException as error:
- reason = _("Backup creation for the volume %s failled. %s"
- % (queue.volume_id, str(error)))
+ reason = _(
+ "Backup creation for the volume %s failed. %s"
+ % (queue.volume_id, str(error))
+ )
LOG.info(reason)
self.result.add_failed_backup(project_id, queue.volume_id, reason)
parsed = parse.parse("Error in creating volume backup {id}", str(error))
@@ -274,8 +317,10 @@
queue.save()
# Added extra exception as OpenstackSDKException does not handle the keystone unauthourized issue.
except Exception as error:
- reason = _("Backup creation for the volume %s failled. %s"
- % (queue.volume_id, str(error)))
+ reason = _(
+ "Backup creation for the volume %s failed. %s"
+ % (queue.volume_id, str(error))
+ )
LOG.error(reason)
self.result.add_failed_backup(project_id, queue.volume_id, reason)
parsed = parse.parse("Error in creating volume backup {id}", str(error))
@@ -291,8 +336,9 @@
# backup gen was not created
def process_pre_failed_backup(self, task):
# 1.notify via email
- reason = _("The backup creation for the volume %s was prefailed."
- % task.volume_id)
+ reason = _(
+ "The backup creation for the volume %s was prefailed." % task.volume_id
+ )
self.result.add_failed_backup(task.project_id, task.volume_id, reason)
# LOG.error(reason)
# 2. remove failed task from the task queue
@@ -344,13 +390,16 @@
if queue.backup_id == "NULL":
self.process_pre_failed_backup(queue)
return
- if project_id not in self.project_list: self.process_non_existing_backup(queue)
+ if project_id not in self.project_list:
+ self.process_non_existing_backup(queue)
self.openstacksdk.set_project(self.project_list[project_id])
backup_gen = self.openstacksdk.get_backup(queue.backup_id)
- if backup_gen == None:
+ if backup_gen is None:
# TODO(Alex): need to check when it is none
- LOG.info(_("[Beta] Backup status of %s is returning none." % (queue.backup_id)))
+ LOG.info(
+ _("[Beta] Backup status of %s is returning none." % (queue.backup_id))
+ )
self.process_non_existing_backup(queue)
return
if backup_gen.status == "error":
@@ -362,7 +411,6 @@
else: # "deleting", "restoring", "error_restoring" status
self.process_using_backup(queue)
-
def _volume_backup(self, task):
# matching_backups = [
# g for g in self.available_backups if g.backup_id == task.backup_id
diff --git a/staffeln/conductor/manager.py b/staffeln/conductor/manager.py
index 5388df5..a6e93a9 100755
--- a/staffeln/conductor/manager.py
+++ b/staffeln/conductor/manager.py
@@ -1,12 +1,11 @@
-import cotyledon
-from futurist import periodics
-from oslo_log import log
-import staffeln.conf
import threading
import time
-from staffeln.common import constants
-from staffeln.common import context
+import cotyledon
+import staffeln.conf
+from futurist import periodics
+from oslo_log import log
+from staffeln.common import constants, context
from staffeln.common import time as xtime
from staffeln.conductor import backup
from staffeln.i18n import _
@@ -44,7 +43,7 @@
self.cycle_start_time = xtime.get_current_time()
# loop - take care of backup result while timeout
- while (1):
+ while 1:
queues_started = self.controller.get_queues(
filters={"backup_status": constants.BACKUP_WIP}
)
@@ -53,21 +52,31 @@
break
if not self._backup_cycle_timeout(): # time in
LOG.info(_("cycle timein"))
- for queue in queues_started: self.controller.check_volume_backup_status(queue)
+ for queue in queues_started:
+ self.controller.check_volume_backup_status(queue)
else: # time out
LOG.info(_("cycle timeout"))
- for queue in queues_started: self.controller.hard_cancel_backup_task(queue)
+ for queue in queues_started:
+ self.controller.hard_cancel_backup_task(queue)
break
time.sleep(constants.BACKUP_RESULT_CHECK_INTERVAL)
# if the backup cycle timeout, then return True
def _backup_cycle_timeout(self):
- time_delta_dict = xtime.parse_timedelta_string(CONF.conductor.backup_cycle_timout)
+ time_delta_dict = xtime.parse_timedelta_string(
+ CONF.conductor.backup_cycle_timout
+ )
- if time_delta_dict == None:
- LOG.info(_("Recycle timeout format is invalid. "
- "Follow <YEARS>y<MONTHS>m<WEEKS>w<DAYS>d<HOURS>h<MINUTES>min<SECONDS>s."))
- time_delta_dict = xtime.parse_timedelta_string(constants.DEFAULT_BACKUP_CYCLE_TIMEOUT)
+ if time_delta_dict is None:
+ LOG.info(
+ _(
+ "Recycle timeout format is invalid. "
+ "Follow <YEARS>y<MONTHS>mon<WEEKS>w<DAYS>d<HOURS>h<MINUTES>min<SECONDS>s."
+ )
+ )
+ time_delta_dict = xtime.parse_timedelta_string(
+ constants.DEFAULT_BACKUP_CYCLE_TIMEOUT
+ )
rto = xtime.timeago(
years=time_delta_dict["years"],
months=time_delta_dict["months"],
@@ -119,11 +128,14 @@
periodic_callables = [
(backup_tasks, (), {}),
]
- periodic_worker = periodics.PeriodicWorker(periodic_callables, schedule_strategy="last_finished")
+ periodic_worker = periodics.PeriodicWorker(
+ periodic_callables, schedule_strategy="last_finished"
+ )
periodic_thread = threading.Thread(target=periodic_worker.start)
periodic_thread.daemon = True
periodic_thread.start()
+
class RotationManager(cotyledon.Service):
name = "Staffeln conductor rotation controller"
@@ -147,8 +159,11 @@
def get_backup_list(self):
threshold_strtime = self.get_threshold_strtime()
- if threshold_strtime == None: return False
- self.backup_list = self.controller.get_backups(filters={"created_at__lt": threshold_strtime})
+ if threshold_strtime is None:
+ return False
+ self.backup_list = self.controller.get_backups(
+ filters={"created_at__lt": threshold_strtime}
+ )
return True
def remove_backups(self):
@@ -163,15 +178,19 @@
def rotation_tasks():
self.controller.refresh_openstacksdk()
# 1. get the list of backups to remove based on the retention time
- if not self.get_backup_list(): return
+ if not self.get_backup_list():
+ return
# 2. get project list
self.controller.update_project_list()
# 3. remove the backups
self.remove_backups()
+
periodic_callables = [
(rotation_tasks, (), {}),
]
- periodic_worker = periodics.PeriodicWorker(periodic_callables, schedule_strategy="last_finished")
+ periodic_worker = periodics.PeriodicWorker(
+ periodic_callables, schedule_strategy="last_finished"
+ )
periodic_thread = threading.Thread(target=periodic_worker.start)
periodic_thread.daemon = True
periodic_thread.start()
@@ -179,9 +198,13 @@
# get the threshold time str
def get_threshold_strtime(self):
time_delta_dict = xtime.parse_timedelta_string(CONF.conductor.retention_time)
- if time_delta_dict == None:
- LOG.info(_("Retention time format is invalid. "
- "Follow <YEARS>y<MONTHS>m<WEEKS>w<DAYS>d<HOURS>h<MINUTES>min<SECONDS>s."))
+ if time_delta_dict is None:
+ LOG.info(
+ _(
+ "Retention time format is invalid. "
+ "Follow <YEARS>y<MONTHS>mon<WEEKS>w<DAYS>d<HOURS>h<MINUTES>min<SECONDS>s."
+ )
+ )
return None
res = xtime.timeago(
diff --git a/staffeln/conductor/result.py b/staffeln/conductor/result.py
index b16bcc6..1414347 100644
--- a/staffeln/conductor/result.py
+++ b/staffeln/conductor/result.py
@@ -1,22 +1,17 @@
# Email notification package
# This should be upgraded by integrating with mail server to send batch
-import smtplib
-from email.mime.text import MIMEText
-from email.mime.multipart import MIMEMultipart
-from oslo_log import log
import staffeln.conf
-from staffeln.common import time as xtime
+from oslo_log import log
from staffeln.common import email
-from staffeln.i18n import _
+from staffeln.common import time as xtime
from staffeln.conductor import backup
-from staffeln.common import openstack as openstacksdk
+from staffeln.i18n import _
CONF = staffeln.conf.CONF
LOG = log.getLogger(__name__)
class BackupResult(object):
-
def __init__(self):
pass
@@ -27,36 +22,39 @@
self.failed_backup_list = {}
def add_project(self, id, name):
- if id in self.success_backup_list: return
- self.project_list.append({
- "name": name,
- "id": id
- })
+ if id in self.success_backup_list:
+ return
+ self.project_list.append({"name": name, "id": id})
self.success_backup_list[id] = []
self.failed_backup_list[id] = []
def add_success_backup(self, project_id, volume_id, backup_id):
- if not project_id in self.success_backup_list:
+ if project_id not in self.success_backup_list:
LOG.error(_("Not registered project is reported for backup result."))
return
- self.success_backup_list[project_id].append({
- "volume_id": volume_id,
- "backup_id": backup_id,
- })
+ self.success_backup_list[project_id].append(
+ {
+ "volume_id": volume_id,
+ "backup_id": backup_id,
+ }
+ )
def add_failed_backup(self, project_id, volume_id, reason):
- if not project_id in self.failed_backup_list:
+ if project_id not in self.failed_backup_list:
LOG.error(_("Not registered project is reported for backup result."))
return
- self.failed_backup_list[project_id].append({
- "volume_id": volume_id,
- "reason": reason,
- })
+ self.failed_backup_list[project_id].append(
+ {
+ "volume_id": volume_id,
+ "reason": reason,
+ }
+ )
def send_result_email(self):
subject = "Backup result"
try:
- if len(CONF.notification.receiver) == 0: return
+ if len(CONF.notification.receiver) == 0:
+ return
email.send(
src_email=CONF.notification.sender_email,
src_pwd=CONF.notification.sender_pwd,
@@ -68,7 +66,12 @@
)
LOG.info(_("Backup result email sent"))
except Exception as e:
- LOG.error(_("Backup result email send failed. Please check email configuration. %s" % (str(e))))
+ LOG.error(
+ _(
+ "Backup result email send failed. Please check email configuration. %s"
+ % (str(e))
+ )
+ )
def publish(self):
# 1. get quota
@@ -78,16 +81,17 @@
for project in self.project_list:
quota = backup.Backup().get_backup_quota(project["id"])
- html += "<h3>Project: ${PROJECT}</h3><br>" \
- "<h3>Quota Usage</h3><br>" \
- "<h4>Limit: ${QUOTA_LIMIT}, In Use: ${QUOTA_IN_USE}, Reserved: ${QUOTA_RESERVED}</h4><br>" \
- "<h3>Success List</h3><br>" \
- "<h4>${SUCCESS_VOLUME_LIST}</h4><br>" \
- "<h3>Failed List</h3><br>" \
- "<h4>${FAILED_VOLUME_LIST}</h4><br>"
+ html += (
+ "<h3>Project: ${PROJECT}</h3><br>"
+ "<h3>Quota Usage</h3><br>"
+ "<h4>Limit: ${QUOTA_LIMIT}, In Use: ${QUOTA_IN_USE}, Reserved: ${QUOTA_RESERVED}</h4><br>"
+ "<h3>Success List</h3><br>"
+ "<h4>${SUCCESS_VOLUME_LIST}</h4><br>"
+ "<h3>Failed List</h3><br>"
+ "<h4>${FAILED_VOLUME_LIST}</h4><br>"
+ )
success_volumes = "<br>".join(
-
[
"Volume ID: %s, Backup ID: %s"
% (str(e["volume_id"]), str(e["backup_id"]))
@@ -108,6 +112,7 @@
html = html.replace("${SUCCESS_VOLUME_LIST}", success_volumes)
html = html.replace("${FAILED_VOLUME_LIST}", failed_volumes)
html = html.replace("${PROJECT}", project["name"])
- if html == "": return
+ if html == "":
+ return
self.content += html
self.send_result_email()
diff --git a/staffeln/conf/__init__.py b/staffeln/conf/__init__.py
index 3359185..3289b63 100755
--- a/staffeln/conf/__init__.py
+++ b/staffeln/conf/__init__.py
@@ -1,10 +1,5 @@
from oslo_config import cfg
-
-from staffeln.conf import api
-from staffeln.conf import conductor
-from staffeln.conf import database
-from staffeln.conf import notify
-from staffeln.conf import paths
+from staffeln.conf import api, conductor, database, notify, paths
CONF = cfg.CONF
diff --git a/staffeln/conf/conductor.py b/staffeln/conf/conductor.py
index 69f7fe2..3924beb 100755
--- a/staffeln/conf/conductor.py
+++ b/staffeln/conf/conductor.py
@@ -12,8 +12,10 @@
cfg.IntOpt(
"backup_workers",
default=1,
- help=_("The maximum number of backup processes to "
- "fork and run. Default to number of CPUs on the host."),
+ help=_(
+ "The maximum number of backup processes to "
+ "fork and run. Default to number of CPUs on the host."
+ ),
),
cfg.IntOpt(
"backup_service_period",
@@ -23,10 +25,15 @@
),
cfg.StrOpt(
"backup_cycle_timout",
- regex=r'((?P<years>\d+?)y)?((?P<months>\d+?)mon)?((?P<weeks>\d+?)w)?((?P<days>\d+?)d)?((?P<hours>\d+?)h)?((?P<minutes>\d+?)min)?((?P<seconds>\d+?)s)?',
+ regex=(
+ r"((?P<years>\d+?)y)?((?P<months>\d+?)mon)?((?P<weeks>\d+?)w)?"
+ r"((?P<days>\d+?)d)?((?P<hours>\d+?)h)?((?P<minutes>\d+?)min)?((?P<seconds>\d+?)s)?"
+ ),
default=constants.DEFAULT_BACKUP_CYCLE_TIMEOUT,
- help=_("The duration while the backup cycle waits backups."
- "<YEARS>y<MONTHS>mon<WEEKS>w<DAYS>d<HOURS>h<MINUTES>min<SECONDS>s."),
+ help=_(
+ "The duration while the backup cycle waits for backups. "
+ "<YEARS>y<MONTHS>mon<WEEKS>w<DAYS>d<HOURS>h<MINUTES>min<SECONDS>s."
+ ),
),
cfg.StrOpt(
"backup_metadata_key",
@@ -38,8 +45,10 @@
cfg.IntOpt(
"rotation_workers",
default=1,
- help=_("The maximum number of rotation processes to "
- "fork and run. Default to number of CPUs on the host."),
+ help=_(
+ "The maximum number of rotation processes to "
+ "fork and run. Default to number of CPUs on the host."
+ ),
),
cfg.IntOpt(
"retention_service_period",
@@ -50,15 +59,22 @@
cfg.IntOpt(
"rotation_workers",
default=1,
- help=_("The maximum number of rotation processes to "
- "fork and run. Default to number of CPUs on the host."),
+ help=_(
+ "The maximum number of rotation processes to "
+ "fork and run. Default to number of CPUs on the host."
+ ),
),
cfg.StrOpt(
"retention_time",
- regex=r'((?P<years>\d+?)y)?((?P<months>\d+?)mon)?((?P<weeks>\d+?)w)?((?P<days>\d+?)d)?((?P<hours>\d+?)h)?((?P<minutes>\d+?)min)?((?P<seconds>\d+?)s)?',
+ regex=(
+ r"((?P<years>\d+?)y)?((?P<months>\d+?)mon)?((?P<weeks>\d+?)w)?"
+ r"((?P<days>\d+?)d)?((?P<hours>\d+?)h)?((?P<minutes>\d+?)min)?((?P<seconds>\d+?)s)?"
+ ),
default="2w3d",
- help=_("The time of retention period, the for mat is "
- "<YEARS>y<MONTHS>mon<WEEKS>w<DAYS>d<HOURS>h<MINUTES>min<SECONDS>s."),
+ help=_(
+ "The time of retention period, the format is "
+ "<YEARS>y<MONTHS>mon<WEEKS>w<DAYS>d<HOURS>h<MINUTES>min<SECONDS>s."
+ ),
),
]
diff --git a/staffeln/conf/notify.py b/staffeln/conf/notify.py
index 13375f1..890796a 100644
--- a/staffeln/conf/notify.py
+++ b/staffeln/conf/notify.py
@@ -1,7 +1,6 @@
from oslo_config import cfg
from staffeln.i18n import _
-
notify_group = cfg.OptGroup(
"notification",
title="Notification options",
diff --git a/staffeln/conf/paths.py b/staffeln/conf/paths.py
index 8d403c5..7dbd9a1 100644
--- a/staffeln/conf/paths.py
+++ b/staffeln/conf/paths.py
@@ -1,9 +1,8 @@
+import os
+
from oslo_config import cfg
from staffeln.i18n import _
-
-import os
-
PATH_OPTS = [
cfg.StrOpt(
"pybasedir",
diff --git a/staffeln/db/api.py b/staffeln/db/api.py
index db0255f..2d10a05 100644
--- a/staffeln/db/api.py
+++ b/staffeln/db/api.py
@@ -1,6 +1,4 @@
"""Base classes for storage engines"""
-
-import abc
from oslo_config import cfg
from oslo_db import api as db_api
@@ -11,7 +9,3 @@
def get_instance():
"""Return a DB API instance."""
return IMPL
-
-
-# class BaseConnection(object, metaclass=abc.ABCMeta):
- """Base class for storage system connections."""
diff --git a/staffeln/db/migration.py b/staffeln/db/migration.py
index 176d66a..c75952d 100644
--- a/staffeln/db/migration.py
+++ b/staffeln/db/migration.py
@@ -1,8 +1,6 @@
"""Database setup command"""
-
-
-from stevedore import driver
import staffeln.conf
+from stevedore import driver
CONF = staffeln.conf.CONF
@@ -12,8 +10,9 @@
def get_backend():
global _IMPL
if not _IMPL:
- _IMPL = driver.DriverManager("staffeln.database.migration_backend",
- CONF.database.backend).driver
+ _IMPL = driver.DriverManager(
+ "staffeln.database.migration_backend", CONF.database.backend
+ ).driver
return _IMPL
diff --git a/staffeln/db/sqlalchemy/api.py b/staffeln/db/sqlalchemy/api.py
index 0148c3d..adfa7a7 100644
--- a/staffeln/db/sqlalchemy/api.py
+++ b/staffeln/db/sqlalchemy/api.py
@@ -4,19 +4,15 @@
import operator
from oslo_config import cfg
-from oslo_log import log
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy import session as db_session
from oslo_db.sqlalchemy import utils as db_utils
-from oslo_utils import timeutils
-from oslo_utils import strutils
-from oslo_utils import uuidutils
+from oslo_log import log
+from oslo_utils import strutils, timeutils, uuidutils
from sqlalchemy.inspection import inspect
from sqlalchemy.orm import exc
-
-from staffeln.db.sqlalchemy import models
from staffeln.common import short_id
-
+from staffeln.db.sqlalchemy import models
LOG = log.getLogger(__name__)
@@ -109,14 +105,13 @@
if filters is None:
filters = {}
-
plain_fields = [
"volume_id",
"backup_id",
"project_id",
"backup_completed",
"instance_id",
- "created_at"
+ "created_at",
]
return self._add_filters(
@@ -148,7 +143,7 @@
def _add_filters(self, query, model, filters=None, plain_fields=None):
"""Add filters while listing the columns from database table"""
- timestamp_mixin_fields = ["created_at", "updated_at"]
+ # timestamp_mixin_fields = ["created_at", "updated_at"]
filters = filters or {}
for raw_fieldname, value in filters.items():
@@ -231,7 +226,7 @@
except exc.NoResultFound:
LOG.error("Resource Not found.")
- deleted_row = session.delete(row)
+ session.delete(row)
return row
def _get_model_list(
@@ -271,7 +266,7 @@
try:
return self._update(models.Backup_data, backup_id, values)
- except:
+ except: # noqa: E722
LOG.error("backup resource not found.")
def create_queue(self, values):
@@ -293,7 +288,7 @@
try:
return self._update(models.Queue_data, id, values)
- except:
+ except: # noqa: E722
LOG.error("Queue resource not found.")
def get_queue_by_id(self, context, id):
@@ -308,22 +303,21 @@
return self._get(
context, model=models.Queue_data, fieldname=fieldname, value=value
)
- except:
+ except: # noqa: E722
LOG.error("Queue not found")
def soft_delete_queue(self, id):
try:
return self._soft_delete(models.Queue_data, id)
- except:
+ except: # noqa: E722
LOG.error("Queue Not found.")
-
def get_backup_by_backup_id(self, context, backup_id):
"""Get the column from the backup_data with matching backup_id"""
try:
return self._get_backup(context, fieldname="backup_id", value=backup_id)
- except:
+ except: # noqa: E722
LOG.error("Backup not found with backup_id %s." % backup_id)
def _get_backup(self, context, fieldname, value):
@@ -333,12 +327,11 @@
return self._get(
context, model=models.Backup_data, fieldname=fieldname, value=value
)
- except:
+ except: # noqa: E722
LOG.error("Backup resource not found.")
-
def soft_delete_backup(self, id):
try:
return self._soft_delete(models.Backup_data, id)
- except:
+ except: # noqa: E722
LOG.error("Backup Not found.")
diff --git a/staffeln/db/sqlalchemy/migration.py b/staffeln/db/sqlalchemy/migration.py
index e836b25..4c46672 100644
--- a/staffeln/db/sqlalchemy/migration.py
+++ b/staffeln/db/sqlalchemy/migration.py
@@ -1,5 +1,3 @@
-import os
-
from staffeln.db.sqlalchemy import api as sqla_api
from staffeln.db.sqlalchemy import models
diff --git a/staffeln/db/sqlalchemy/models.py b/staffeln/db/sqlalchemy/models.py
index 93dae9f..a93bd6e 100644
--- a/staffeln/db/sqlalchemy/models.py
+++ b/staffeln/db/sqlalchemy/models.py
@@ -1,24 +1,11 @@
"""
SQLAlchemy models for staffeln service
"""
+import urllib.parse as urlparse
from oslo_db.sqlalchemy import models
-from oslo_serialization import jsonutils
-from sqlalchemy import Boolean
-from sqlalchemy import Column
-from sqlalchemy import DateTime
+from sqlalchemy import Column, Integer, String, UniqueConstraint
from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy import Float
-from sqlalchemy import ForeignKey
-from sqlalchemy import Integer
-from sqlalchemy import LargeBinary
-from sqlalchemy import orm
-from sqlalchemy import Numeric
-from sqlalchemy import String
-from sqlalchemy import Text
-from sqlalchemy.types import TypeDecorator, TEXT
-from sqlalchemy import UniqueConstraint
-import urllib.parse as urlparse
from staffeln import conf
CONF = conf.CONF
diff --git a/staffeln/i18n.py b/staffeln/i18n.py
index 462c91c..09fe8aa 100755
--- a/staffeln/i18n.py
+++ b/staffeln/i18n.py
@@ -4,7 +4,7 @@
import oslo_i18n
-DOMAIN = 'staffeln'
+DOMAIN = "staffeln"
_translators = oslo_i18n.TranslatorFactory(domain=DOMAIN)
diff --git a/staffeln/objects/__init__.py b/staffeln/objects/__init__.py
index 4f7ca56..b79e47b 100755
--- a/staffeln/objects/__init__.py
+++ b/staffeln/objects/__init__.py
@@ -1,6 +1,8 @@
-from .queue import Queue
-from .volume import Volume
+from .queue import Queue # noqa: F401
+from .volume import Volume # noqa: F401
+
+
# from volume import Volume
def register_all():
- __import__('staffeln.objects.volume')
- __import__('staffeln.objects.queue')
+ __import__("staffeln.objects.volume")
+ __import__("staffeln.objects.queue")
diff --git a/staffeln/objects/base.py b/staffeln/objects/base.py
index dd0730d..8dd6f94 100755
--- a/staffeln/objects/base.py
+++ b/staffeln/objects/base.py
@@ -3,10 +3,8 @@
from oslo_utils import versionutils
from oslo_versionedobjects import base as ovoo_base
from oslo_versionedobjects import fields as ovoo_fields
-
from staffeln import objects
-
remotable_classmethod = ovoo_base.remotable_classmethod
remotable = ovoo_base.remotable
diff --git a/staffeln/objects/fields.py b/staffeln/objects/fields.py
index bbf13e2..3f6c2a7 100644
--- a/staffeln/objects/fields.py
+++ b/staffeln/objects/fields.py
@@ -1,11 +1,7 @@
"""Utility method for objects"""
-
-import ast
-
from oslo_serialization import jsonutils
from oslo_versionedobjects import fields
-
BooleanField = fields.BooleanField
StringField = fields.StringField
DateTimeField = fields.DateTimeField
diff --git a/staffeln/objects/queue.py b/staffeln/objects/queue.py
index 0137d4e..8bdebc7 100644
--- a/staffeln/objects/queue.py
+++ b/staffeln/objects/queue.py
@@ -1,4 +1,3 @@
-from staffeln.common import short_id
from staffeln.db import api as db_api
from staffeln.objects import base
from staffeln.objects import fields as sfeild
@@ -22,12 +21,12 @@
}
@base.remotable_classmethod
- def list(cls, context, filters=None):
+ def list(cls, context, filters=None): # pylint: disable=E0213
db_queue = cls.dbapi.get_queue_list(context, filters=filters)
return [cls._from_db_object(cls(context), obj) for obj in db_queue]
@base.remotable_classmethod
- def get_by_id(cls, context, id):
+ def get_by_id(cls, context, id): # pylint: disable=E0213
"""Find a backup based on backup_id
:param context: Security context. NOTE: This should only
be used internally by the indirection_api.
@@ -65,4 +64,4 @@
@base.remotable
def delete_queue(self):
"""Soft Delete the :class:`Queue_data` from the DB"""
- db_obj = self.dbapi.soft_delete_queue(self.id)
+ self.dbapi.soft_delete_queue(self.id)
diff --git a/staffeln/objects/volume.py b/staffeln/objects/volume.py
index ce300af..0680c78 100644
--- a/staffeln/objects/volume.py
+++ b/staffeln/objects/volume.py
@@ -21,7 +21,7 @@
}
@base.remotable_classmethod
- def list(cls, context, filters=None):
+ def list(cls, context, filters=None): # pylint: disable=E0213
"""Return a list of :class:`Backup` objects.
:param filters: dict mapping the filter to a value.
@@ -63,10 +63,10 @@
@base.remotable
def delete_backup(self):
"""Soft Delete the :class:`Queue_data` from the DB"""
- db_obj = self.dbapi.soft_delete_backup(self.id)
+ self.dbapi.soft_delete_backup(self.id)
@base.remotable_classmethod
- def get_backup_by_backup_id(cls, context, backup_id):
+ def get_backup_by_backup_id(cls, context, backup_id): # pylint: disable=E0213
"""Find a backup based on backup_id
:param context: Security context. NOTE: This should only
be used internally by the indirection_api.
@@ -83,4 +83,3 @@
else:
backup = cls._from_db_object(cls(context), db_backup)
return backup
-
\ No newline at end of file
diff --git a/staffeln/tests/test_staffeln.py b/staffeln/tests/test_staffeln.py
index a8b6e32..6c7c5f3 100755
--- a/staffeln/tests/test_staffeln.py
+++ b/staffeln/tests/test_staffeln.py
@@ -23,6 +23,5 @@
class TestStaffeln(base.TestCase):
-
def test_something(self):
pass
diff --git a/staffeln/version.py b/staffeln/version.py
index 0f9e8d2..efe79df 100755
--- a/staffeln/version.py
+++ b/staffeln/version.py
@@ -1,5 +1,4 @@
import pbr.version
-
-version_info = pbr.version.VersionInfo('staffeln')
+version_info = pbr.version.VersionInfo("staffeln")
version_string = version_info.version_string