diff --git a/.gitignore b/.gitignore index 4b0c713b..dbd533b4 100644 --- a/.gitignore +++ b/.gitignore @@ -10,5 +10,14 @@ # Sublime files *.sublime-* +# Log files *.log.[0-9] +*.log + +# Tox files .tox/ + +# Script generated files +*.db +*.lck + diff --git a/requirements/base.txt b/requirements/base.txt index 5ba4c0a8..30ea4201 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -2,3 +2,4 @@ metsrw==0.2.0 requests<3.0 sqlalchemy six +urllib3 diff --git a/tests/test_amclient.py b/tests/test_amclient.py index 2c379d89..19fdfe6d 100644 --- a/tests/test_amclient.py +++ b/tests/test_amclient.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """To run the tests:: $ python -m unittest tests.test_amclient @@ -11,6 +10,7 @@ import vcr from transfers import amclient +from transfers import errors AM_URL = 'http://192.168.168.192' @@ -24,6 +24,7 @@ class TmpDir: + """Context manager to clear and create a temporary directory and destroy it after usage. """ @@ -118,7 +119,7 @@ def test_completed_transfers_bad_key(self): completed_transfers = amclient.AMClient( am_api_key='bad api key', am_user_name=AM_USER_NAME, am_url=AM_URL).completed_transfers() - assert completed_transfers is None + assert completed_transfers is errors.ERR_INVALID_RESPONSE @vcr.use_cassette( 'fixtures/vcr_cassettes/unapproved_transfers_transfers.yaml') diff --git a/tox.ini b/tox.ini index 6b6aa33c..21825c66 100644 --- a/tox.ini +++ b/tox.ini @@ -19,4 +19,5 @@ commands = flake8 . exclude = .git, .tox, __pycache__, old, build, dist ignore = E501 # Lines are too long + E402 # Module level imports not at top of file import-order-style = pep8 diff --git a/transfers/amclient.py b/transfers/amclient.py index 0817ba24..105f490a 100755 --- a/transfers/amclient.py +++ b/transfers/amclient.py @@ -1,224 +1,44 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- + """Archivematica Client. Module and CLI that holds functionality for interacting with the various Archivematica APIs. """ + from __future__ import print_function, unicode_literals -import argparse import binascii import base64 -from collections import defaultdict, namedtuple +from collections import defaultdict import json -import logging -import logging.config # Has to be imported separately import os import pprint import re import sys import requests -from six import binary_type, text_type - - -try: - from os import fsencode -except ImportError: - def fsencode(filename): - """Cribbed & modified from Python3's OS module to support Python2.""" - encoding = sys.getfilesystemencoding() - if isinstance(filename, binary_type): - return filename - elif isinstance(filename, text_type): - return filename.encode(encoding) - else: - raise TypeError("expect bytes or str, not %s" % - type(filename).__name__) - - -THIS_DIR = os.path.abspath(os.path.dirname(__file__)) -DEFAULT_LOGFILE = os.path.join(THIS_DIR, 'amclient.log') -LOGGER = logging.getLogger('amclient') -RETRY_COUNT = 5 -DEF_AM_URL = 'http://127.0.0.1' -DEF_SS_URL = 'http://127.0.0.1:8000' -DEF_USER_NAME = 'test' -UUID_PATT = re.compile( - '^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$') -UNDECODABLE = 'UNABLE TO DECODE' -UNDEC_MSG = ('Unable to decode a transfer source component; giving up and' - ' returning {0}'.format(UNDECODABLE)) - - -# Reusable argument constants (for CLI). 
-Arg = namedtuple('Arg', ['name', 'help', 'type']) -AIP_UUID = Arg( - name='aip_uuid', - help='UUID of the target AIP', - type=None) -AM_API_KEY = Arg( - name='am_api_key', - help='Archivematica API key', - type=None) -DIP_UUID = Arg( - name='dip_uuid', - help='UUID of the target DIP', - type=None) -SS_API_KEY = Arg( - name='ss_api_key', - help='Storage Service API key', - type=None) -TRANSFER_SOURCE = Arg( - name='transfer_source', - help='Transfer source UUID', - type=None) - - -# Reusable option constants (for CLI). -Opt = namedtuple('Opt', ['name', 'metavar', 'help', 'default', 'type']) -AM_URL = Opt( - name='am-url', - metavar='URL', - help='Archivematica URL. Default: {0}'.format(DEF_AM_URL), - default=DEF_AM_URL, - type=None) -AM_USER_NAME = Opt( - name='am-user-name', - metavar='USERNAME', - help='Archivematica username. Default: {0}'.format(DEF_USER_NAME), - default=DEF_USER_NAME, - type=None) -DIRECTORY = Opt( - name='directory', - metavar='DIR', - help='Directory path to save the DIP in', - default=None, - type=None) -OUTPUT_MODE = Opt( - name='output-mode', - metavar='MODE', - help='How to print output, JSON (default) or Python', - default='json', - type=None) -SS_URL = Opt( - name='ss-url', - metavar='URL', - help='Storage Service URL. Default: {0}'.format(DEF_SS_URL), - default=DEF_SS_URL, - type=None) -SS_USER_NAME = Opt( - name='ss-user-name', - metavar='USERNAME', - help='Storage Service username. Default: {0}'.format(DEF_USER_NAME), - default=DEF_USER_NAME, - type=None) -TRANSFER_PATH = Opt( - name='transfer-path', - metavar='PATH', - help='Relative path within the Transfer Source. Default: ""', - default=b'', - type=fsencode) - - -# Sub-command configuration: give them a name, help text, a tuple of ``Arg`` -# instances and a tuple of ``Opts`` instances. 
-SubCommand = namedtuple('SubCommand', ['name', 'help', 'args', 'opts']) -SUBCOMMANDS = ( - SubCommand( - name='close-completed-transfers', - help='Close all completed transfers.', - args=(AM_API_KEY,), - opts=(AM_USER_NAME, AM_URL, OUTPUT_MODE) - ), - SubCommand( - name='close-completed-ingests', - help='Close all completed ingests.', - args=(AM_API_KEY,), - opts=(AM_USER_NAME, AM_URL, OUTPUT_MODE) - ), - SubCommand( - name='completed-transfers', - help='Print all completed transfers.', - args=(AM_API_KEY,), - opts=(AM_USER_NAME, AM_URL, OUTPUT_MODE) - ), - SubCommand( - name='completed-ingests', - help='Print all completed ingests.', - args=(AM_API_KEY,), - opts=(AM_USER_NAME, AM_URL, OUTPUT_MODE) - ), - SubCommand( - name='unapproved-transfers', - help='Print all unapproved transfers.', - args=(AM_API_KEY,), - opts=(AM_USER_NAME, AM_URL, OUTPUT_MODE) - ), - SubCommand( - name='transferables', - help='Print all transferable entities in the Storage Service.', - args=(SS_API_KEY, TRANSFER_SOURCE), - opts=(SS_USER_NAME, SS_URL, TRANSFER_PATH, OUTPUT_MODE) - ), - SubCommand( - name='aips', - help='Print all AIPs in the Storage Service.', - args=(SS_API_KEY,), - opts=(SS_USER_NAME, SS_URL, OUTPUT_MODE) - ), - SubCommand( - name='dips', - help='Print all DIPs in the Storage Service.', - args=(SS_API_KEY,), - opts=(SS_USER_NAME, SS_URL, OUTPUT_MODE) - ), - SubCommand( - name='aips2dips', - help='Print all AIPs in the Storage Service along with their corresponding DIPs.', - args=(SS_API_KEY,), - opts=(SS_USER_NAME, SS_URL, OUTPUT_MODE) - ), - SubCommand( - name='aip2dips', - help='Print the AIP with AIP_UUID along with its corresponding DIP(s).', - args=(AIP_UUID, SS_API_KEY), - opts=(SS_USER_NAME, SS_URL, OUTPUT_MODE) - ), - SubCommand( - name='download-dip', - help='Download the DIP with DIP_UUID.', - args=(DIP_UUID, SS_API_KEY), - opts=(SS_USER_NAME, SS_URL, DIRECTORY, OUTPUT_MODE) - ) -) - - -def get_parser(): - """Parse arguments according to the ``SUBCOMMANDS`` configuration. Return - an argparse ``Namespace`` instance representing the parsed arguments. - """ - parser = argparse.ArgumentParser( - description='Archivematica Client', - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument( - '--log-file', metavar='FILE', help='logfile', default=DEFAULT_LOGFILE) - parser.add_argument( - '--log-level', choices=['ERROR', 'WARNING', 'INFO', 'DEBUG'], - default='INFO', help='Set the debugging output level.') - subparsers = parser.add_subparsers(help='sub-command help', - dest='subcommand') - for subcommand in SUBCOMMANDS: - subparser = subparsers.add_parser(subcommand.name, - help=subcommand.help) - for arg in subcommand.args: - subparser.add_argument( - arg.name, help=arg.help, type=arg.type) - for opt in subcommand.opts: - subparser.add_argument( - '--' + opt.name, metavar=opt.metavar, help=opt.help, - default=opt.default, type=opt.type) - return parser +import urllib3 + +# AM Client module configuration + +# Allow execution as an executable and the script to be run at package level +# by ensuring that it can see itself. +sys.path.append('../') + +from transfers import loggingconfig +from transfers import defaults +from transfers import amclientargs +from transfers import errors + + +def get_logger(log_file_name, log_level): + return loggingconfig.setup(log_level, log_file_name, "amclient") + + +# Default logging if no other logging is provided in the class. 
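The get_logger() helper above wraps the shared loggingconfig.setup() call so that amclient.py binds a default LOGGER at import time and main() can rebind it once CLI arguments are parsed. A minimal sketch of that lifecycle, assuming only the modules this patch introduces (the DEBUG level is illustrative):

    from transfers import defaults, loggingconfig

    # Import time: module-level logger writing to amclient.log at the
    # default level.
    LOGGER = loggingconfig.setup(
        defaults.DEFAULT_LOG_LEVEL, defaults.AMCLIENT_LOG_FILE, 'amclient')

    # After argument parsing, rebind with user-supplied values, as main()
    # does below.
    LOGGER = loggingconfig.setup(
        'DEBUG', defaults.AMCLIENT_LOG_FILE, 'amclient')
    LOGGER.debug('Logger reconfigured from CLI arguments')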
+LOGGER = get_logger(defaults.AMCLIENT_LOG_FILE, defaults.DEFAULT_LOG_LEVEL) def _call_url_json(url, params, method='GET'): @@ -229,21 +49,30 @@ def _call_url_json(url, params, method='GET'): """ method = method.upper() LOGGER.debug('URL: %s; params: %s; method: %s', url, params, method) - response = requests.request(method, url=url, params=params) - LOGGER.debug('Response: %s', response) - LOGGER.debug('type(response.text): %s ', type(response.text)) - LOGGER.debug('Response content-type: %s', response.headers['content-type']) - if not response.ok: - LOGGER.warning('%s Request to %s returned %s %s', method, url, - response.status_code, response.reason) - LOGGER.debug('Response: %s', response.text) - return None + try: - return response.json() - except ValueError: # JSON could not be decoded - LOGGER.warning('Could not parse JSON from response: %s', - response.text) - return None + response = requests.request(method, url=url, params=params) + LOGGER.debug('Response: %s', response) + LOGGER.debug('type(response.text): %s ', type(response.text)) + LOGGER.debug('Response content-type: %s', + response.headers['content-type']) + + if not response.ok: + LOGGER.warning('%s Request to %s returned %s %s', method, url, + response.status_code, response.reason) + LOGGER.debug('Response: %s', response.text) + return errors.ERR_INVALID_RESPONSE + try: + return response.json() + except ValueError: # JSON could not be decoded + LOGGER.warning('Could not parse JSON from response: %s', + response.text) + return errors.ERR_PARSE_JSON + + except (urllib3.exceptions.NewConnectionError, + requests.exceptions.ConnectionError) as e: + LOGGER.error("Connection error %s", e) + return errors.ERR_SERVER_CONN def b64decode_ts_location_browse(result): @@ -268,17 +97,17 @@ def dec(thing): try: import chardet except ImportError: - LOGGER.debug(UNDEC_MSG) - return UNDECODABLE + LOGGER.debug(defaults.UNDEC_MSG) + return defaults.UNDECODABLE encoding = chardet.detect(thing).get('encoding') if encoding: try: return thing.decode(encoding) except ValueError: - LOGGER.debug(UNDEC_MSG) - return UNDECODABLE - LOGGER.debug(UNDEC_MSG) - return UNDECODABLE + LOGGER.debug(defaults.UNDEC_MSG) + return defaults.UNDECODABLE + LOGGER.debug(defaults.UNDEC_MSG) + return defaults.UNDECODABLE try: result['directories'] = [dec(d) for d in result['directories']] @@ -292,46 +121,8 @@ def dec(thing): return result -def setup_logger(log_file, log_level): - logging.config.dictConfig({ - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'default': { - 'format': ('%(levelname)-8s %(asctime)s ' - '%(filename)s:%(lineno)-4s %(message)s'), - 'datefmt': '%Y-%m-%d %H:%M:%S', - }, - }, - 'handlers': { - 'file': { - 'class': 'logging.handlers.RotatingFileHandler', - 'formatter': 'default', - 'filename': log_file, - 'backupCount': 2, - 'maxBytes': 10 * 1024, - }, - 'console': { - 'class': 'logging.StreamHandler', - 'formatter': 'default', - 'level': 'WARNING' - } - }, - 'loggers': { - 'amclient': { - 'level': log_level, - 'handlers': ['file'], - }, - 'requests.packages.urllib3': { - 'level': log_level, - 'handlers': ['file'], - } - }, - }) - - def is_uuid(thing): - return UUID_PATT.search(thing) is not None + return defaults.UUID_PATT.search(thing) is not None class AMClient: @@ -583,14 +374,36 @@ def download_aip(self): def main(): - parser = get_parser() - args = parser.parse_args() - setup_logger(args.log_file, args.log_level) + + argparser = amclientargs.get_parser() + + # Python 2.x, ensures that help is printed consistently like 
we see in + # Python 3.x. + if len(sys.argv) < 2: + argparser.print_help() + sys.exit(0) + + args = argparser.parse_args() am_client = AMClient(**vars(args)) + + # Re-configure global LOGGER based on user provided parameters. + global LOGGER + LOGGER = get_logger(args.log_file, args.log_level) + try: - getattr(am_client, 'print_{0}'.format(args.subcommand.replace('-', '_'))) + func = getattr(am_client, args.subcommand.replace('-', '_')) + if func: + res = func() + if isinstance(res, dict): + print(res) + return + else: + if res in errors.error_lookup: + return print(errors.error_lookup[res]) + sys.exit(errors.error_lookup[errors.ERR_AMCLIENT_UNKNOWN]) + except AttributeError: - parser.print_help() + argparser.print_help() sys.exit(0) diff --git a/transfers/amclientargs.py b/transfers/amclientargs.py new file mode 100644 index 00000000..4a533767 --- /dev/null +++ b/transfers/amclientargs.py @@ -0,0 +1,205 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Archivematica Client Argument Parser + +import sys +import argparse +from collections import namedtuple + +from six import binary_type, text_type + +# AM Client Module Configuration. + +# Allow execution as an executable and the script to be run at package level +# by ensuring that it can see itself. +sys.path.append('../') + +from transfers import defaults + +try: + from os import fsencode +except ImportError: + def fsencode(filename): + """Cribbed & modified from Python3's OS module to support Python2.""" + encoding = sys.getfilesystemencoding() + if isinstance(filename, binary_type): + return filename + elif isinstance(filename, text_type): + return filename.encode(encoding) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + +# Reusable argument constants (for CLI). +Arg = namedtuple('Arg', ['name', 'help', 'type']) +AIP_UUID = Arg( + name='aip_uuid', + help='UUID of the target AIP', + type=None) +AM_API_KEY = Arg( + name='am_api_key', + help='Archivematica API key', + type=None) +DIP_UUID = Arg( + name='dip_uuid', + help='UUID of the target DIP', + type=None) +SS_API_KEY = Arg( + name='ss_api_key', + help='Storage Service API key', + type=None) +TRANSFER_SOURCE = Arg( + name='transfer_source', + help='Transfer source UUID', + type=None) + +# Reusable option constants (for CLI). +Opt = namedtuple('Opt', ['name', 'metavar', 'help', 'default', 'type']) +AM_URL = Opt( + name='am-url', + metavar='URL', + help='Archivematica URL. Default: {0}'.format(defaults.DEF_AM_URL), + default=defaults.DEF_AM_URL, + type=None) +AM_USER_NAME = Opt( + name='am-user-name', + metavar='USERNAME', + help='Archivematica username. Default: {0}'.format(defaults.DEF_USER_NAME), + default=defaults.DEF_USER_NAME, + type=None) +DIRECTORY = Opt( + name='directory', + metavar='DIR', + help='Directory path to save the DIP in', + default=None, + type=None) +OUTPUT_MODE = Opt( + name='output-mode', + metavar='MODE', + help='How to print output, JSON (default) or Python', + default='json', + type=None) +SS_URL = Opt( + name='ss-url', + metavar='URL', + help='Storage Service URL. Default: {0}'.format(defaults.DEF_SS_URL), + default=defaults.DEF_SS_URL, + type=None) +SS_USER_NAME = Opt( + name='ss-user-name', + metavar='USERNAME', + help='Storage Service username. Default: {0}'.format( + defaults.DEF_USER_NAME), + default=defaults.DEF_USER_NAME, + type=None) +TRANSFER_PATH = Opt( + name='transfer-path', + metavar='PATH', + help='Relative path within the Transfer Source. 
Default: ""', + default=b'', + type=fsencode) + + +# Sub-command configuration: give them a name, help text, a tuple of ``Arg`` +# instances and a tuple of ``Opts`` instances. +SubCommand = namedtuple('SubCommand', ['name', 'help', 'args', 'opts']) +SUBCOMMANDS = ( + SubCommand( + name='close-completed-transfers', + help='Close all completed transfers.', + args=(AM_API_KEY,), + opts=(AM_USER_NAME, AM_URL, OUTPUT_MODE) + ), + SubCommand( + name='close-completed-ingests', + help='Close all completed ingests.', + args=(AM_API_KEY,), + opts=(AM_USER_NAME, AM_URL, OUTPUT_MODE) + ), + SubCommand( + name='completed-transfers', + help='Print all completed transfers.', + args=(AM_API_KEY,), + opts=(AM_USER_NAME, AM_URL, OUTPUT_MODE) + ), + SubCommand( + name='completed-ingests', + help='Print all completed ingests.', + args=(AM_API_KEY,), + opts=(AM_USER_NAME, AM_URL, OUTPUT_MODE) + ), + SubCommand( + name='unapproved-transfers', + help='Print all unapproved transfers.', + args=(AM_API_KEY,), + opts=(AM_USER_NAME, AM_URL, OUTPUT_MODE) + ), + SubCommand( + name='transferables', + help='Print all transferable entities in the Storage Service.', + args=(SS_API_KEY, TRANSFER_SOURCE), + opts=(SS_USER_NAME, SS_URL, TRANSFER_PATH, OUTPUT_MODE) + ), + SubCommand( + name='aips', + help='Print all AIPs in the Storage Service.', + args=(SS_API_KEY,), + opts=(SS_USER_NAME, SS_URL, OUTPUT_MODE) + ), + SubCommand( + name='dips', + help='Print all DIPs in the Storage Service.', + args=(SS_API_KEY,), + opts=(SS_USER_NAME, SS_URL, OUTPUT_MODE) + ), + SubCommand( + name='aips2dips', + help='Print all AIPs in the Storage Service along with their \ + corresponding DIPs.', + args=(SS_API_KEY,), + opts=(SS_USER_NAME, SS_URL, OUTPUT_MODE) + ), + SubCommand( + name='aip2dips', + help=('Print the AIP with AIP_UUID along with its corresponding ' + 'DIP(s).'), + args=(AIP_UUID, SS_API_KEY), + opts=(SS_USER_NAME, SS_URL, OUTPUT_MODE) + ), + SubCommand( + name='download-dip', + help='Download the DIP with DIP_UUID.', + args=(DIP_UUID, SS_API_KEY), + opts=(SS_USER_NAME, SS_URL, DIRECTORY, OUTPUT_MODE) + ) +) + + +def get_parser(): + """Parse arguments according to the ``SUBCOMMANDS`` configuration. Return + an argparse ``Namespace`` instance representing the parsed arguments. 
+ """ + parser = argparse.ArgumentParser( + description='Archivematica Client', + formatter_class=argparse.RawDescriptionHelpFormatter) + parser.add_argument( + '--log-file', metavar='FILE', help='logfile', + default=defaults.AMCLIENT_LOG_FILE) + parser.add_argument( + '--log-level', choices=['ERROR', 'WARNING', 'INFO', 'DEBUG'], + default=defaults.DEFAULT_LOG_LEVEL, + help='Set the debugging output level.') + subparsers = parser.add_subparsers(help='sub-command help', + dest='subcommand', metavar="") + for subcommand in SUBCOMMANDS: + subparser = subparsers.add_parser(subcommand.name, + help=subcommand.help) + for arg in subcommand.args: + subparser.add_argument( + arg.name, help=arg.help, type=arg.type) + for opt in subcommand.opts: + subparser.add_argument( + '--' + opt.name, metavar=opt.metavar, help=opt.help, + default=opt.default, type=opt.type) + return parser diff --git a/transfers/defaults.py b/transfers/defaults.py new file mode 100644 index 00000000..200f13d3 --- /dev/null +++ b/transfers/defaults.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Defaults and constants for amclient.py and transfers.py + +import os +import re + +DEF_AM_URL = 'http://127.0.0.1:6208' +DEF_SS_URL = 'http://127.0.0.1:6208' +DEF_USER_NAME = 'test' + +UUID_PATT = re.compile( + '^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$') + +UNDECODABLE = 'UNABLE TO DECODE' +UNDEC_MSG = ('Unable to decode a transfer source component; giving up and' + ' returning {0}'.format(UNDECODABLE)) + +# Default logging for thee module. +THIS_DIR = os.path.abspath(os.path.dirname(__file__)) + +# Global for logfile if not set. +AMCLIENT_LOG_FILE = os.path.join(THIS_DIR, 'amclient.log') +TRANSFER_LOG_FILE = os.path.join(THIS_DIR, 'automate-transfer.log') + +# Default log level. 
+DEFAULT_LOG_LEVEL = "INFO" diff --git a/transfers/errors.py b/transfers/errors.py new file mode 100644 index 00000000..8adf979d --- /dev/null +++ b/transfers/errors.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +ERR_INVALID_RESPONSE = 1 +ERR_PARSE_JSON = 2 +ERR_SERVER_CONN = 3 +ERR_AMCLIENT_UNKNOWN = -1 + +error_lookup = { + ERR_INVALID_RESPONSE: "Invalid response from server, check amclient log", + ERR_PARSE_JSON: "Could not parse JSON response, check amclient log", + ERR_SERVER_CONN: "Error connecting to the server, check amclient log", + ERR_AMCLIENT_UNKNOWN: "Unknown return from amclient, check logs."} diff --git a/transfers/loggingconfig.py b/transfers/loggingconfig.py new file mode 100644 index 00000000..c98591c7 --- /dev/null +++ b/transfers/loggingconfig.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import logging +import logging.config # Has to be imported separately + + +def setup(log_level, log_file_name, log_name): + + # Log format string for flake8 compliance + log_fmt = ('%(levelname)-8s %(asctime)s %(filename)s:%(lineno)-4s ' + '%(message)s') + + # Configure logging + CONFIG = { + 'version': 1, + 'disable_existing_loggers': False, + 'formatters': { + 'default': { + 'format': log_fmt, + 'datefmt': '%Y-%m-%d %H:%M:%S', + }, + }, + 'handlers': { + 'console': { + 'class': 'logging.StreamHandler', + 'formatter': 'default', + }, + 'file': { + 'class': 'logging.handlers.RotatingFileHandler', + 'formatter': 'default', + 'filename': log_file_name, + 'backupCount': 2, + 'maxBytes': 10 * 1024, + }, + }, + 'loggers': { + 'transfer': { + 'level': log_level, + 'handlers': ['console', 'file'], + }, + 'amclient': { + 'level': log_level, + 'handlers': ['file'], + }, + 'requests.packages.urllib3': { + 'level': log_level, + 'handlers': ['file'], + }, + }, + } + + LOGGER = logging.getLogger(log_name) + logging.config.dictConfig(CONFIG) + return LOGGER + + +def set_log_level(log_level, quiet, verbose): + log_levels = { + 2: 'ERROR', + 1: 'WARNING', + 0: 'INFO', + -1: 'DEBUG', + } + if log_level is None: + level = quiet - verbose + level = max(level, -1) # No smaller than -1 + level = min(level, 2) # No larger than 2 + return log_levels[level] + return log_level diff --git a/transfers/models.py b/transfers/models.py index f1cc53be..52491c5b 100644 --- a/transfers/models.py +++ b/transfers/models.py @@ -20,7 +20,8 @@ class Unit(Base): current = Column(Boolean(create_constraint=False)) def __repr__(self): - return "<Unit(id={s.id}, uuid={s.uuid}, unit_type={s.unit_type}, path={s.path}, status={s.status}, current={s.current})>".format(s=self) + return ("<Unit(id={s.id}, uuid={s.uuid}, unit_type={s.unit_type}, " + "path={s.path}, status={s.status}, current={s.current})>".format(s=self)) def init(databasefile): diff --git a/transfers/transfer.py b/transfers/transfer.py index 5c8c4bbb..df4bd268 100755 --- a/transfers/transfer.py +++ b/transfers/transfer.py @@ -1,4 +1,6 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- + """ Automate Transfers @@ -10,19 +12,46 @@ import argparse import ast import base64 -import logging -import logging.config # Has to be imported separately import os -import subprocess import sys +import subprocess import time +import urllib3 + import requests from six import binary_type, text_type from six.moves import configparser +# Allow execution as an executable and the script to be run at package level +# by ensuring that it can see itself. +sys.path.append('../') + +from transfers import defaults +from transfers import loggingconfig +from transfers import models + + +# Directory for various processing decisions, below.
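The new errors module replaces the bare None returns in amclient.py with integer sentinels that callers can map to messages. A sketch of the calling convention, assuming the modules added in this patch (the credentials are placeholders):

    from transfers import amclient, errors

    client = amclient.AMClient(am_api_key='placeholder-key',
                               am_user_name='test',
                               am_url='http://127.0.0.1')
    result = client.completed_transfers()
    if isinstance(result, dict):
        print(result)  # Decoded JSON from the Archivematica API.
    elif result in errors.error_lookup:
        # e.g. "Error connecting to the server, check amclient log"
        print(errors.error_lookup[result])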
+THIS_DIR = os.path.abspath(os.path.dirname(__file__)) + + +def get_setting(config_file, setting, default=None): + config = configparser.SafeConfigParser() + try: + config.read(config_file) + return config.get('transfers', setting) + except Exception: + return default + + +def get_logger(log_file_name, log_level): + return loggingconfig.setup(log_level, log_file_name, "transfer") + + +# Default logging if no other logging is provided via main(). +LOGGER = get_logger(defaults.TRANSFER_LOG_FILE, "INFO") -from . import models try: from os import fsencode, fsdecode @@ -35,7 +64,8 @@ def fsencode(filename): elif isinstance(filename, text_type): return filename.encode(encoding) else: - raise TypeError("expect bytes or str, not %s" % type(filename).__name__) + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) def fsdecode(filename): encoding = sys.getfilesystemencoding() @@ -44,93 +74,52 @@ def fsdecode(filename): elif isinstance(filename, binary_type): return filename.decode(encoding) else: - raise TypeError("expect bytes or str, not %s" % type(filename).__name__) - -THIS_DIR = os.path.abspath(os.path.dirname(__file__)) -sys.path.append(THIS_DIR) + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) -LOGGER = logging.getLogger('transfer') - -CONFIG_FILE = None - - -def get_setting(setting, default=None): - config = configparser.SafeConfigParser() - try: - config.read(CONFIG_FILE) - return config.get('transfers', setting) - except Exception: - return default - - -def setup(config_file, log_level): - global CONFIG_FILE - CONFIG_FILE = config_file - models.init(get_setting('databasefile', os.path.join(THIS_DIR, 'transfers.db'))) - - # Configure logging - default_logfile = os.path.join(THIS_DIR, 'automate-transfer.log') - CONFIG = { - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'default': { - 'format': '%(levelname)-8s %(asctime)s %(filename)s:%(lineno)-4s %(message)s', - 'datefmt': '%Y-%m-%d %H:%M:%S', - }, - }, - 'handlers': { - 'console': { - 'class': 'logging.StreamHandler', - 'formatter': 'default', - }, - 'file': { - 'class': 'logging.handlers.RotatingFileHandler', - 'formatter': 'default', - 'filename': get_setting('logfile', default_logfile), - 'backupCount': 2, - 'maxBytes': 10 * 1024, - }, - }, - 'loggers': { - 'transfer': { - 'level': log_level, - 'handlers': ['console', 'file'], - }, - }, - } - logging.config.dictConfig(CONFIG) - - -def _call_url_json(url, params): - """ - Helper to GET a URL where the expected response is 200 with JSON. +def _call_url_json(url, params, method='GET'): + """Helper to GET a URL where the expected response is 200 with JSON. 
:param str url: URL to call :param dict params: Params to pass to requests.get :returns: Dict of the returned JSON or None """ - LOGGER.debug('URL: %s; params: %s;', url, params) - response = requests.get(url, params=params) - LOGGER.debug('Response: %s', response) - if not response.ok: - LOGGER.warning('Request to %s returned %s %s', url, response.status_code, response.reason) - LOGGER.debug('Response: %s', response.text) - return None + method = method.upper() + LOGGER.debug('URL: %s; params: %s; method: %s', url, params, method) + try: - return response.json() - except ValueError: # JSON could not be decoded - LOGGER.warning('Could not parse JSON from response: %s', response.text) + response = requests.request(method, url=url, params=params) + LOGGER.debug('Response: %s', response) + LOGGER.debug('type(response.text): %s ', type(response.text)) + LOGGER.debug('Response content-type: %s', + response.headers['content-type']) + + if not response.ok: + LOGGER.warning('%s Request to %s returned %s %s', method, url, + response.status_code, response.reason) + LOGGER.debug('Response: %s', response.text) + return None + try: + return response.json() + except ValueError: # JSON could not be decoded + LOGGER.warning('Could not parse JSON from response: %s', + response.text) + return None + + except (urllib3.exceptions.NewConnectionError, + requests.exceptions.ConnectionError) as e: + LOGGER.error("Connection error %s", e) return None -def get_status(am_url, am_user, am_api_key, unit_uuid, unit_type, session, hide_on_complete=False): +def get_status(am_url, am_user, am_api_key, unit_uuid, unit_type, session, + hide_on_complete=False): """ Get status of the SIP or Transfer with unit_uuid. :param str unit_uuid: UUID of the unit to query for. :param str unit_type: 'ingest' or 'transfer' - :param bool hide_on_complete: If True, hide the unit in the dashboard if COMPLETE + :param bool hide_on_complete: Hide the unit in the dashboard if COMPLETE :returns: Dict with status of the unit from Archivematica or None. 
""" # Get status @@ -138,6 +127,9 @@ def get_status(am_url, am_user, am_api_key, unit_uuid, unit_type, session, hide_ params = {'username': am_user, 'api_key': am_api_key} unit_info = _call_url_json(url, params) + if unit_info is None: + return None + # If complete, hide in dashboard if hide_on_complete and unit_info and unit_info['status'] == 'COMPLETE': LOGGER.info('Hiding %s %s in dashboard', unit_type, unit_uuid) @@ -147,18 +139,26 @@ def get_status(am_url, am_user, am_api_key, unit_uuid, unit_type, session, hide_ LOGGER.debug('Response: %s', response) # If Transfer is complete, get the SIP's status - if unit_info and unit_type == 'transfer' and unit_info['status'] == 'COMPLETE' and unit_info['sip_uuid'] != 'BACKLOG': - LOGGER.info('%s is a complete transfer, fetching SIP %s status.', unit_uuid, unit_info['sip_uuid']) + if unit_info and unit_type == 'transfer' and \ + unit_info['status'] == 'COMPLETE' and \ + unit_info['sip_uuid'] != 'BACKLOG': + LOGGER.info('%s is a complete transfer, fetching SIP %s status.', + unit_uuid, unit_info['sip_uuid']) # Update DB to refer to this one - db_unit = session.query(models.Unit).filter_by(unit_type=unit_type, uuid=unit_uuid).one() + db_unit = session.query(models.Unit).filter_by( + unit_type=unit_type, uuid=unit_uuid).one() db_unit.unit_type = 'ingest' db_unit.uuid = unit_info['sip_uuid'] # Get SIP status url = am_url + '/api/ingest/status/' + unit_info['sip_uuid'] + '/' unit_info = _call_url_json(url, params) + if unit_info is None: + return None + # If complete, hide in dashboard - if hide_on_complete and unit_info and unit_info['status'] == 'COMPLETE': + if hide_on_complete and unit_info and \ + unit_info['status'] == 'COMPLETE': LOGGER.info('Hiding SIP %s in dashboard', db_unit.uuid) url = am_url + '/api/ingest/' + db_unit.uuid + '/delete/' LOGGER.debug('Method: DELETE; URL: %s; params: %s;', url, params) @@ -181,27 +181,31 @@ def get_accession_id(dirname): """ script_path = os.path.join(THIS_DIR, 'get-accession-number') try: - p = subprocess.Popen([script_path, dirname], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + p = subprocess.Popen( + [script_path, dirname], stdin=subprocess.PIPE, + stdout=subprocess.PIPE, stderr=subprocess.PIPE) except Exception: LOGGER.info('Error when trying to run %s', script_path) return None output, err = p.communicate() if p.returncode != 0: - LOGGER.info('Error running %s %s: RC: %s; stdout: %s; stderr: %s', script_path, dirname, p.returncode, output, err) + LOGGER.info('Error running %s %s: RC: %s; stdout: %s; stderr: %s', + script_path, dirname, p.returncode, output, err) return None output = fsdecode(output) try: return ast.literal_eval(output) except Exception: - LOGGER.info('Unable to parse output from %s. Output: %s', script_path, output) + LOGGER.info( + 'Unable to parse output from %s. Output: %s', script_path, output) return None -def run_scripts(directory, *args): +def run_scripts(directory, config_file, *args): """ Run all executable scripts in directory relative to this file. - :param str directory: Directory in the same folder as this file to run scripts from. + :param str directory: Dir in the same folder as this file to run scripts :param args: All other parameters will be passed to called scripts. 
:return: None """ @@ -211,7 +215,7 @@ def run_scripts(directory, *args): return script_args = list(args) LOGGER.debug('script_args: %s', script_args) - script_extensions = get_setting('scriptextensions', '').split(':') + script_extensions = get_setting(config_file, 'scriptextensions', '').split(':') LOGGER.debug('script_extensions: %s', script_extensions) for script in sorted(os.listdir(directory)): LOGGER.debug('Script: %s', script) @@ -224,10 +228,13 @@ def run_scripts(directory, *args): continue script_name, script_ext = os.path.splitext(script) if script_extensions and script_ext not in script_extensions: - LOGGER.info("'%s' for '%s' not in configured list of script file extensions, skipping", script_ext, script_path) + LOGGER.info(("'%s' for '%s' not in configured list of script file " + "extensions, skipping", script_ext, script_path)) continue LOGGER.info('Running %s "%s"', script_path, '" "'.join(args)) - p = subprocess.Popen([script_path] + script_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + p = subprocess.Popen( + [script_path] + script_args, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) stdout, stderr = p.communicate() LOGGER.info('Return code: %s', p.returncode) LOGGER.info('stdout: %s', stdout) @@ -235,19 +242,27 @@ def run_scripts(directory, *args): LOGGER.warning('stderr: %s', stderr) -def get_next_transfer(ss_url, ss_user, ss_api_key, ts_location_uuid, path_prefix, depth, completed, see_files): +def get_next_transfer(ss_url, ss_user, ss_api_key, ts_location_uuid, + path_prefix, depth, completed, see_files): """ - Helper to find the first directory that doesn't have an associated transfer. + Helper to find the first directory that doesn't have an associated + transfer. - :param ss_url: URL of the Storage Sevice to query - :param ss_user: User on the Storage Service for authentication - :param ss_api_key: API key for user on the Storage Service for authentication + :param ss_url: URL of the Storage Sevice to query + :param ss_user: User on the Storage Service for authentication + :param ss_api_key: API key for user on the Storage Service for + authentication :param ts_location_uuid: UUID of the transfer source Location - :param path_prefix: Relative path inside the Location to work with. - :param depth: Depth relative to path_prefix to create a transfer from. Should be 1 or greater. - :param set completed: Set of the paths of completed transfers. Ideally, relative to the same transfer source location, including the same path_prefix, and at the same depth. - :param bool see_files: Return files as well as folders to become transfers. - :returns: Path relative to TS Location of the new transfer + :param path_prefix: Relative path inside the Location to work with. + :param depth: Depth relative to path_prefix to create a transfer + from. Should be 1 or greater. + :param set completed: Set of the paths of completed transfers. Ideally, + relative to the same transfer source location, + including the same path_prefix, and at the same + depth. + :param bool see_files: Return files as well as folders to become + transfers. + :returns: Path relative to TS Location of the new transfer. 
""" # Get sorted list from source dir url = ss_url + '/api/v2/location/' + ts_location_uuid + '/browse/' @@ -258,6 +273,7 @@ def get_next_transfer(ss_url, ss_user, ss_api_key, ts_location_uuid, path_prefix if path_prefix: params['path'] = base64.b64encode(path_prefix) browse_info = _call_url_json(url, params) + if browse_info is None: return None if see_files: @@ -267,7 +283,8 @@ def get_next_transfer(ss_url, ss_user, ss_api_key, ts_location_uuid, path_prefix entries = [base64.b64decode(e.encode('utf8')) for e in entries] LOGGER.debug('Entries: %s', entries) entries = [os.path.join(path_prefix, e) for e in entries] - # If at the correct depth, check if any of these have not been made into transfers yet + # If at the correct depth, check if any of these have not been made into + # transfers yet if depth <= 1: # Find the directories that are not already in the DB using sets entries = set(entries) - completed @@ -275,7 +292,9 @@ def get_next_transfer(ss_url, ss_user, ss_api_key, ts_location_uuid, path_prefix # Sort, take the first entries = sorted(list(entries)) if not entries: - LOGGER.info("All potential transfers in %s have been created.", path_prefix) + LOGGER.info( + "All potential transfers in %s have been created.", + path_prefix) return None target = entries[0] return target @@ -283,34 +302,46 @@ def get_next_transfer(ss_url, ss_user, ss_api_key, ts_location_uuid, path_prefix # Recurse on each directory for e in entries: LOGGER.debug('New path: %s', e) - target = get_next_transfer(ss_url, ss_user, ss_api_key, ts_location_uuid, e, depth - 1, completed, see_files) + target = get_next_transfer( + ss_url, ss_user, ss_api_key, ts_location_uuid, e, depth - 1, + completed, see_files) if target: return target return None -def start_transfer(ss_url, ss_user, ss_api_key, ts_location_uuid, ts_path, depth, am_url, am_user, am_api_key, transfer_type, see_files, session): +def start_transfer(ss_url, ss_user, ss_api_key, ts_location_uuid, ts_path, + depth, am_url, am_user, am_api_key, transfer_type, + see_files, session, config_file): """ Starts a new transfer. :param ss_url: URL of the Storage Sevice to query :param ss_user: User on the Storage Service for authentication - :param ss_api_key: API key for user on the Storage Service for authentication + :param ss_api_key: API key for user on the Storage Service for + authentication :param ts_location_uuid: UUID of the transfer source Location :param ts_path: Relative path inside the Location to work with. - :param depth: Depth relative to ts_path to create a transfer from. Should be 1 or greater. + :param depth: Depth relative to ts_path to create a transfer from. Should + be 1 or greater. :param am_url: URL of Archivematica pipeline to start transfer on :param am_user: User on Archivematica for authentication :param am_api_key: API key for user on Archivematica for authentication - :param bool see_files: If true, start transfers from files as well as directories + :param bool see_files: If true, start transfers from files as well as + directories :param session: SQLAlchemy session with the DB - :returns: Tuple of Transfer information about the new transfer or None on error. + :returns: Tuple of Transfer information about the new transfer or None on + error. 
""" # Start new transfer completed = {x[0] for x in session.query(models.Unit.path).all()} - target = get_next_transfer(ss_url, ss_user, ss_api_key, ts_location_uuid, ts_path, depth, completed, see_files) + target = get_next_transfer( + ss_url, ss_user, ss_api_key, ts_location_uuid, ts_path, depth, + completed, see_files) if not target: - LOGGER.warning("All potential transfers in %s have been created. Exiting", ts_path) + LOGGER.warning( + "All potential transfers in %s have been created. Exiting", + ts_path) return None LOGGER.info("Starting with %s", target) # Get accession ID @@ -324,7 +355,8 @@ def start_transfer(ss_url, ss_user, ss_api_key, ts_location_uuid, ts_path, depth 'name': target_name, 'type': transfer_type, 'accession': accession, - 'paths[]': [base64.b64encode(fsencode(ts_location_uuid) + b':' + target)], + 'paths[]': [base64.b64encode(fsencode(ts_location_uuid) + b':' + + target)], 'row_ids[]': [''], } LOGGER.debug('URL: %s; Params: %s; Data: %s', url, params, data) @@ -338,15 +370,16 @@ def start_transfer(ss_url, ss_user, ss_api_key, ts_location_uuid, ts_path, depth if not response.ok or resp_json.get('error'): LOGGER.error('Unable to start transfer.') LOGGER.error('Response: %s', resp_json) - new_transfer = models.Unit(path=target, unit_type='transfer', status='FAILED', current=False) + new_transfer = models.Unit( + path=target, unit_type='transfer', status='FAILED', current=False) session.add(new_transfer) return None try: # Run all scripts in pre-transfer directory - # TODO what inputs do we want? run_scripts( 'pre-transfer', + config_file, resp_json['path'], # Absolute path 'standard', # Transfer type ) @@ -363,14 +396,16 @@ def start_transfer(ss_url, ss_user, ss_api_key, ts_location_uuid, ts_path, depth # Mark as started if result: LOGGER.info('Approved %s', result) - new_transfer = models.Unit(uuid=result, path=target, unit_type='transfer', current=True) + new_transfer = models.Unit( + uuid=result, path=target, unit_type='transfer', current=True) LOGGER.info('New transfer: %s', new_transfer) session.add(new_transfer) break LOGGER.info('Failed approve, try %s of %s', i + 1, retry_count) else: LOGGER.warning('Not approved') - new_transfer = models.Unit(uuid=None, path=target, unit_type='transfer', current=False) + new_transfer = models.Unit( + uuid=None, path=target, unit_type='transfer', current=False) session.add(new_transfer) return None @@ -392,13 +427,14 @@ def approve_transfer(directory_name, url, am_api_key, am_user): waiting_transfers = _call_url_json(get_url, params) if waiting_transfers is None: LOGGER.warning('No waiting transfer ') - return None + return waiting_transfers for a in waiting_transfers['results']: LOGGER.debug("Found waiting transfer: %s", a['directory']) if fsencode(a['directory']) == directory_name: # Post to approve transfer post_url = url + "/api/transfer/approve/" - params = {'username': am_user, 'api_key': am_api_key, 'type': a['type'], 'directory': directory_name} + params = {'username': am_user, 'api_key': am_api_key, + 'type': a['type'], 'directory': directory_name} LOGGER.debug('URL: %s; Params: %s;', post_url, params) r = requests.post(post_url, data=params) LOGGER.debug('Response: %s', r) @@ -412,21 +448,32 @@ def approve_transfer(directory_name, url, am_api_key, am_user): return None -def main(am_user, am_api_key, ss_user, ss_api_key, ts_uuid, ts_path, depth, am_url, ss_url, transfer_type, see_files, hide_on_complete=False, config_file=None, log_level='INFO'): +def main(am_user, am_api_key, ss_user, ss_api_key, ts_uuid, ts_path, 
depth, + am_url, ss_url, transfer_type, see_files, hide_on_complete=False, + config_file=None, log_level='INFO'): + + global LOGGER + LOGGER = get_logger( + get_setting(config_file, 'logfile', defaults.TRANSFER_LOG_FILE), log_level) - setup(config_file, log_level) LOGGER.info("Waking up") + models.init( + get_setting(config_file, 'databasefile', os.path.join(THIS_DIR, + 'transfers.db'))) + session = models.Session() # Check for evidence that this is already running default_pidfile = os.path.join(THIS_DIR, 'pid.lck') - pid_file = get_setting('pidfile', default_pidfile) + pid_file = get_setting(config_file, 'pidfile', default_pidfile) try: # Open PID file only if it doesn't exist for read/write - f = os.fdopen(os.open(pid_file, os.O_CREAT | os.O_EXCL | os.O_RDWR), 'r+') + f = os.fdopen( + os.open(pid_file, os.O_CREAT | os.O_EXCL | os.O_RDWR), 'r+') except: - LOGGER.info('This script is already running. To override this behaviour and start a new run, remove %s', pid_file) + LOGGER.info('This script is already running. To override this ' + 'behaviour and start a new run, remove %s', pid_file) return 0 else: pid = os.getpid() @@ -447,7 +494,9 @@ def main(am_user, am_api_key, ss_user, ss_api_key, ts_uuid, ts_path, depth, am_u else: LOGGER.info('Current unit: %s', current_unit) # Get status - status_info = get_status(am_url, am_user, am_api_key, unit_uuid, unit_type, session, hide_on_complete) + status_info = get_status( + am_url, am_user, am_api_key, unit_uuid, unit_type, session, + hide_on_complete) LOGGER.info('Status info: %s', status_info) if not status_info: LOGGER.error('Could not fetch status for %s. Exiting.', unit_uuid) @@ -463,13 +512,15 @@ def main(am_user, am_api_key, ss_user, ss_api_key, ts_uuid, ts_path, depth, am_u return 0 # If waiting on input, send email, exit elif status == 'USER_INPUT': - LOGGER.info('Waiting on user input, running scripts in user-input directory.') + LOGGER.info( + 'Waiting on user input, running scripts in user-input directory.') # TODO What inputs do we want? 
microservice = status_info.get('microservice', '') run_scripts( 'user-input', + config_file, microservice, # Current microservice name - str(microservice != current_unit.microservice), # String True or False if this is the first time at this wait point + # String True or False if this is the first time at this prompt + str(microservice != current_unit.microservice), status_info['path'], # Absolute path status_info['uuid'], # SIP/Transfer UUID status_info['name'], # SIP/Transfer name @@ -482,7 +534,10 @@ # If failed, rejected, completed etc, start new transfer if current_unit: current_unit.current = False - new_transfer = start_transfer(ss_url, ss_user, ss_api_key, ts_uuid, ts_path, depth, am_url, am_user, am_api_key, transfer_type, see_files, session) + new_transfer = start_transfer( + ss_url, ss_user, ss_api_key, ts_uuid, ts_path, + depth, am_url, am_user, am_api_key, + transfer_type, see_files, session, config_file) session.commit() os.remove(pid_file) @@ -491,41 +546,77 @@ if __name__ == '__main__': - parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument('-u', '--user', metavar='USERNAME', required=True, help='Username of the Archivematica dashboard user to authenticate as.') - parser.add_argument('-k', '--api-key', metavar='KEY', required=True, help='API key of the Archivematica dashboard user.') - parser.add_argument('--ss-user', metavar='USERNAME', required=True, help='Username of the Storage Service user to authenticate as.') - parser.add_argument('--ss-api-key', metavar='KEY', required=True, help='API key of the Storage Service user.') - parser.add_argument('-t', '--transfer-source', metavar='UUID', required=True, help='Transfer Source Location UUID to fetch transfers from.') - parser.add_argument('--transfer-path', metavar='PATH', help='Relative path within the Transfer Source. Default: ""', type=fsencode, default=b'') # Convert to bytes from unicode str provided by command line - parser.add_argument('--depth', '-d', help='Depth to create the transfers from relative to the transfer source location and path. Default of 1 creates transfers from the children of transfer-path.', type=int, default=1) - parser.add_argument('--am-url', '-a', metavar='URL', help='Archivematica URL. Default: http://127.0.0.1', default='http://127.0.0.1') - parser.add_argument('--ss-url', '-s', metavar='URL', help='Storage Service URL. Default: http://127.0.0.1:8000', default='http://127.0.0.1:8000') - parser.add_argument('--transfer-type', metavar='TYPE', help="Type of transfer to start. One of: 'standard' (default), 'unzipped bag', 'zipped bag', 'dspace'.", default='standard', choices=['standard', 'unzipped bag', 'zipped bag', 'dspace']) - parser.add_argument('--files', action='store_true', help='If set, start transfers from files as well as folders.') - parser.add_argument('--hide', action='store_true', help='If set, hide the Transfers and SIPs in the dashboard once they complete.') - parser.add_argument('-c', '--config-file', metavar='FILE', help='Configuration file(log/db/PID files)', default=None) + # Variable for conformance to flake8 line length below.
+ rawformatter = argparse.RawDescriptionHelpFormatter + + parser = argparse.ArgumentParser(description=__doc__, + formatter_class=rawformatter) + parser.add_argument('-u', '--user', metavar='USERNAME', required=True, + help=('Username of the Archivematica dashboard user ' + 'to authenticate as.')) + parser.add_argument('-k', '--api-key', metavar='KEY', + required=True, help=('API key of the Archivematica ' + 'dashboard user.')) + parser.add_argument('--ss-user', metavar='USERNAME', required=True, + help=('Username of the Storage Service user to ' + 'authenticate as.')) + parser.add_argument('--ss-api-key', metavar='KEY', + required=True, help=('API key of the Storage Service ' + 'user.')) + parser.add_argument( + '-t', '--transfer-source', metavar='UUID', required=True, + help='Transfer Source Location UUID to fetch transfers from.') + parser.add_argument( + # default=b'' to convert to bytes from unicode str provided by + # command line. + '--transfer-path', metavar='PATH', help=('Relative path within the ' + 'Transfer Source. Default: ""' + ), type=fsencode, default=b'') + parser.add_argument( + '--depth', '-d', help=('Depth to create the transfers from relative ' + 'to the transfer source location and path. ' + 'Default of 1 creates transfers from the ' + 'children of transfer-path.'), type=int, + default=1) + parser.add_argument('--am-url', '-a', metavar='URL', + help='Archivematica URL. Default: %s' % + defaults.DEF_AM_URL, + default='%s' % defaults.DEF_AM_URL) + parser.add_argument('--ss-url', '-s', metavar='URL', + help='Storage Service URL. Default: %s' % + defaults.DEF_SS_URL, + default='%s' % defaults.DEF_SS_URL) + parser.add_argument( + '--transfer-type', metavar='TYPE', help=("Type of transfer to start. " "One of: 'standard' " "(default), 'unzipped bag', " "'zipped bag', 'dspace'."), + default='standard', choices=['standard', 'unzipped bag', + 'zipped bag', 'dspace']) + parser.add_argument('--files', action='store_true', + help=('If set, start transfers from files as well as ' + 'folders.')) + parser.add_argument('--hide', action='store_true', + help=('If set, hide the Transfers and SIPs in the ' + 'dashboard once they complete.')) + parser.add_argument('-c', '--config-file', metavar='FILE', + help='Configuration file (log/db/PID files)', + default=None) # Logging - parser.add_argument('--verbose', '-v', action='count', default=0, help='Increase the debugging output.') - parser.add_argument('--quiet', '-q', action='count', default=0, help='Decrease the debugging output') - parser.add_argument('--log-level', choices=['ERROR', 'WARNING', 'INFO', 'DEBUG'], default=None, help='Set the debugging output level. This will override -q and -v') + parser.add_argument('--verbose', '-v', action='count', + default=0, help='Increase the debugging output.') + parser.add_argument('--quiet', '-q', action='count', + default=0, help='Decrease the debugging output.') + parser.add_argument( + '--log-level', choices=['ERROR', 'WARNING', 'INFO', 'DEBUG'], + default=None, help=('Set the debugging output level. This will ' + 'override -q and -v.')) args = parser.parse_args() - log_levels = { - 2: 'ERROR', - 1: 'WARNING', - 0: 'INFO', - -1: 'DEBUG', - } - if args.log_level is None: - level = args.quiet - args.verbose - level = max(level, -1) # No smaller than -1 - level = min(level, 2) # No larger than 2 - log_level = log_levels[level] - else: - log_level = args.log_level + log_level = loggingconfig.set_log_level( + args.log_level, args.quiet, args.verbose) sys.exit(main( am_user=args.user,
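For reference, the quiet/verbose arithmetic removed from the __main__ block above now lives in loggingconfig.set_log_level(); a short sketch of its clamping behaviour as defined in this patch:

    from transfers import loggingconfig

    assert loggingconfig.set_log_level(None, quiet=0, verbose=0) == 'INFO'
    assert loggingconfig.set_log_level(None, quiet=0, verbose=3) == 'DEBUG'  # clamped to -1
    assert loggingconfig.set_log_level(None, quiet=9, verbose=0) == 'ERROR'  # clamped to 2
    assert loggingconfig.set_log_level('WARNING', 0, 0) == 'WARNING'  # explicit level wins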