chore(style): Run pyupgrade --py311-plus to modernize idioms #3051

Merged · 2 commits · May 8, 2024

Changes from all commits
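
For reference, the pyupgrade rewrites in this diff fall into four recurring patterns: str.format() calls become f-strings, the redundant object base class is dropped from class definitions, except IOError becomes except OSError, and universal_newlines=True becomes text=True in subprocess calls. Below is a minimal sketch of the two most frequent rewrites, using hypothetical names not taken from the codebase; the less common ones are noted alongside the hunks where they appear.

# Illustrative before/after pairs for the most common pyupgrade rewrites here.
size = 1024
old_header = 'blob {}'.format(size)   # pre-f-string idiom
new_header = f'blob {size}'           # what pyupgrade emits
assert old_header == new_header

class OldStyleResource(object):       # explicit base class is redundant in Python 3
    pass

class NewStyleResource:               # equivalent: every Python 3 class already derives from object
    pass

assert NewStyleResource.__mro__[-1] is object
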
1 change: 0 additions & 1 deletion services/datalad/Pipfile
@@ -7,7 +7,6 @@ verify_ssl = true
datalad = "==1.0.2"
pytest = "*"
coverage = "*"
mock = "*"
pytest-cov = "*"
pytest-xdist = "*"
exceptiongroup = "*"
9 changes: 0 additions & 9 deletions services/datalad/Pipfile.lock

Some generated files are not rendered by default; the Pipfile.lock diff is not shown.

12 changes: 6 additions & 6 deletions services/datalad/datalad_service/common/annex.py
@@ -20,7 +20,7 @@

def compute_git_hash(path, size):
"""Given a path and size, generate the git blob hash for a file."""
git_obj_header = 'blob {}'.format(size).encode() + b'\x00'
git_obj_header = f'blob {size}'.encode() + b'\x00'

Codecov warning: added line services/datalad/datalad_service/common/annex.py#L23 was not covered by tests.
blob_hash = hashlib.sha1()
blob_hash.update(git_obj_header)
# If size is zero, skip opening and mmap
@@ -35,7 +35,7 @@

def compute_file_hash(git_hash, path):
"""Computes a unique hash for a given git path, based on the git hash and path values."""
return hashlib.sha1('{}:{}'.format(git_hash, path).encode()).hexdigest()
return hashlib.sha1(f'{git_hash}:{path}'.encode()).hexdigest()


def parse_ls_tree_line(gitTreeLine):
@@ -79,11 +79,11 @@
def compute_rmet(key, legacy=False):
if len(key) == 40:
if legacy:
key = 'SHA1--{}'.format(key)
key = f'SHA1--{key}'
else:
key = 'GIT--{}'.format(key)
key = f'GIT--{key}'
keyHash = hashlib.md5(key.encode()).hexdigest()
return '{}/{}/{}.log.rmet'.format(keyHash[0:3], keyHash[3:6], key)
return f'{keyHash[0:3]}/{keyHash[3:6]}/{key}.log.rmet'


def parse_remote_line(remoteLine,
@@ -161,7 +161,7 @@
gitObjects = rmetObjects['remote.log'] + '\n' + \
'\n'.join(rmetObjects[rmetPath] for rmetPath in rmetPaths)
catFileProcess = subprocess.run(['git', 'cat-file', '--batch=:::%(objectname)', '--buffer'],
cwd=path, stdout=subprocess.PIPE, input=gitObjects, encoding='utf-8', bufsize=0, universal_newlines=True)
cwd=path, stdout=subprocess.PIPE, input=gitObjects, encoding='utf-8', bufsize=0, text=True)
catFile = io.StringIO(catFileProcess.stdout)
# Read in remote.log first
remoteLogMetadata = catFile.readline().rstrip()
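
A note on the compute_git_hash hunk above: the f-string builds git's object header, the bytes 'blob <size>' followed by a NUL, which is hashed together with the file contents. Below is a minimal standalone sketch of the same scheme for in-memory data; the helper name is hypothetical. The universal_newlines=True to text=True change in the last hunk is likewise behavior-preserving, since text has been an accepted alias for universal_newlines since Python 3.7.

import hashlib

def git_blob_sha1(data: bytes) -> str:
    # Same header construction as compute_git_hash, applied to an in-memory blob.
    blob_hash = hashlib.sha1()
    blob_hash.update(f'blob {len(data)}'.encode() + b'\x00')
    blob_hash.update(data)
    return blob_hash.hexdigest()

# The empty blob hashes to git's well-known constant.
assert git_blob_sha1(b'') == 'e69de29bb2d1d6434b8b29ae775ad8c2e48c5391'
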
6 changes: 3 additions & 3 deletions services/datalad/datalad_service/common/s3.py
@@ -21,14 +21,14 @@ def generate_s3_annex_options(dataset_path):
dataset_id = os.path.basename(dataset_path)
annex_options = [
'type=S3',
'bucket={}'.format(get_s3_bucket()),
f'bucket={get_s3_bucket()}',
'exporttree=yes',
'versioning=yes',
'partsize=1GiB',
'encryption=none',
'fileprefix={}/'.format(dataset_id),
f'fileprefix={dataset_id}/',
'autoenable=true',
'publicurl=https://s3.amazonaws.com/{}'.format(get_s3_bucket()),
f'publicurl=https://s3.amazonaws.com/{get_s3_bucket()}',
'public=no',
]
return annex_options
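
The strings produced by generate_s3_annex_options are git-annex key=value remote parameters. A hedged sketch of how they might be consumed is shown below; the remote name and the assumption that git annex initremote is the consumer are illustrative, since the call site is not part of this diff.

import subprocess

def init_s3_remote(dataset_path, annex_options, remote_name='s3-PUBLIC'):
    # Assumed usage: register an S3 special remote with the generated options.
    subprocess.run(['git', 'annex', 'initremote', remote_name, *annex_options],
                   cwd=dataset_path, check=True)
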
2 changes: 1 addition & 1 deletion services/datalad/datalad_service/datalad.py
@@ -1,6 +1,6 @@
from os import path

class DataladStore(object):
class DataladStore:
"""Store for Datalad state accessed by resource handlers."""

def __init__(self, annex_path):
4 changes: 2 additions & 2 deletions services/datalad/datalad_service/handlers/annex.py
@@ -17,14 +17,14 @@ def hashdirmixed(key):
first_word = struct.unpack('<I', digest[:4])[0]
nums = [first_word >> (6 * x) & 31 for x in range(4)]
letters = ["0123456789zqjxkmvwgpfZQJXKMVWGPF"[i] for i in nums]
return ("{0:s}{1:s}".format(letters[1], letters[0]), "{0:s}{1:s}".format(letters[3], letters[2]))
return (f"{letters[1]:s}{letters[0]:s}", f"{letters[3]:s}{letters[2]:s}")


def key_to_path(key):
return os.path.join('.git', 'annex', 'objects', *hashdirmixed(key), key, key)


class GitAnnexResource(object):
class GitAnnexResource:
"""{worker}/{dataset}/annex/{key} serves git-annex object requests

This allows OpenNeuro to act as a special remote, adding or removing objects from .git/annex/objects/
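
hashdirmixed above reproduces git-annex's "hashdirmixed" layout: two short directories derived from a digest of the key, then the key repeated as directory and filename. A rough usage sketch follows; the dataset path is hypothetical, and the exact directory pair depends on the digest, so it is left symbolic.

import os
from datalad_service.handlers.annex import key_to_path

dataset_path = '/datalad/ds000001'  # assumed dataset worktree location
key = 'MD5E-s24--e04bb0391ed06622b018aac26c736870.nii'
object_file = os.path.join(dataset_path, key_to_path(key))
# object_file == '/datalad/ds000001/.git/annex/objects/<d1>/<d2>/<key>/<key>',
# where <d1> and <d2> are the two directories returned by hashdirmixed(key)
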
2 changes: 1 addition & 1 deletion services/datalad/datalad_service/handlers/annex_objects.py
@@ -5,7 +5,7 @@
from datalad_service.tasks.files import remove_annex_object


class AnnexObjectsResource(object):
class AnnexObjectsResource:

def __init__(self, store):
self.store = store
2 changes: 1 addition & 1 deletion services/datalad/datalad_service/handlers/dataset.py
@@ -10,7 +10,7 @@
from datalad_service.tasks.publish import delete_siblings


class DatasetResource(object):
class DatasetResource:

"""A Falcon API wrapper around underlying datalad/git-annex datasets."""

2 changes: 1 addition & 1 deletion services/datalad/datalad_service/handlers/description.py
@@ -4,7 +4,7 @@
from datalad_service.tasks.description import update_description


class DescriptionResource(object):
class DescriptionResource:
def __init__(self, store):
self.store = store

2 changes: 1 addition & 1 deletion services/datalad/datalad_service/handlers/draft.py
@@ -7,7 +7,7 @@
from datalad_service.common.git import git_commit


class DraftResource(object):
class DraftResource:
def __init__(self, store):
self.store = store

6 changes: 3 additions & 3 deletions services/datalad/datalad_service/handlers/files.py
@@ -10,7 +10,7 @@
from datalad_service.tasks.files import remove_files


class FilesResource(object):
class FilesResource:

def __init__(self, store):
self.store = store
@@ -44,7 +44,7 @@
# File is not present in tree
resp.media = {'error': 'file not found in git tree'}
resp.status = falcon.HTTP_NOT_FOUND
except IOError:
except OSError:

Codecov warning: added line services/datalad/datalad_service/handlers/files.py#L47 was not covered by tests.
# File is not kept locally
resp.media = {'error': 'file not found'}
resp.status = falcon.HTTP_NOT_FOUND
@@ -54,7 +54,7 @@
'error': 'an unknown error occurred accessing this file'}
resp.status = falcon.HTTP_INTERNAL_SERVER_ERROR
self.logger.exception(
'An unknown error processing file "{}"'.format(filename))
f'An unknown error processing file "{filename}"')

def on_post(self, req, resp, dataset, filename):
"""Post will create new files and adds them to the annex if they do not exist, else update existing files."""
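
The except IOError to except OSError change above is purely cosmetic: IOError has been an alias of OSError since Python 3.3, so the handler catches exactly the same exceptions as before. A two-line check:

assert IOError is OSError  # same class object in Python 3
assert issubclass(FileNotFoundError, OSError)  # e.g. opening a file not present locally
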
10 changes: 5 additions & 5 deletions services/datalad/datalad_service/handlers/git.py
@@ -28,14 +28,14 @@
user = 'user' in req.context and req.context['user'] or None
# No user = unauthorized, otherwise token is present with the wrong scope/grant
if user == None:
resp.data = 'Authentication required for git access'.encode()
resp.data = b'Authentication required for git access'

Codecov warning: added line services/datalad/datalad_service/handlers/git.py#L31 was not covered by tests.
resp.status = falcon.HTTP_UNAUTHORIZED
else:
resp.data = 'You do not have permission to access this dataset'.encode()
resp.data = b'You do not have permission to access this dataset'

Codecov warning: added line services/datalad/datalad_service/handlers/git.py#L34 was not covered by tests.
resp.status = falcon.HTTP_FORBIDDEN


class GitRefsResource(object):
class GitRefsResource:
"""/info/refs returns current state for either git-receive-pack or git-upload-pack"""

def __init__(self, store):
@@ -70,7 +70,7 @@
resp.status = falcon.HTTP_UNPROCESSABLE_ENTITY


class GitReceiveResource(object):
class GitReceiveResource:
"""/git-receive-pack is used to receive pushes"""

def __init__(self, store):
@@ -100,7 +100,7 @@
resp.status = falcon.HTTP_UNPROCESSABLE_ENTITY


class GitUploadResource(object):
class GitUploadResource:
"""/git-upload-pack serves git fetch requests"""

def __init__(self, store):
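
These three resources implement git's smart HTTP protocol: a client first fetches /info/refs with a service query parameter, then POSTs to /git-upload-pack (fetch) or /git-receive-pack (push). Below is a hedged sketch in the style of the test file at the end of this diff; the /git/{worker}/{dataset} prefix and the client/test_auth fixtures are borrowed from those tests, and the query string follows the standard protocol rather than anything shown in this hunk.

import falcon

def test_refs_advertisement(client):  # hypothetical test, not part of this PR
    ds_id = 'ds000001'
    url = f'/git/0/{ds_id}/info/refs?service=git-upload-pack'
    # test_auth as defined in the repository's existing test fixtures
    response = client.simulate_get(url, headers={'authorization': test_auth})
    assert response.status != falcon.HTTP_FORBIDDEN
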
2 changes: 1 addition & 1 deletion services/datalad/datalad_service/handlers/heartbeat.py
@@ -1,7 +1,7 @@
import falcon


class HeartbeatResource(object):
class HeartbeatResource:

def on_get(self, req, resp):
resp.media = {
2 changes: 1 addition & 1 deletion services/datalad/datalad_service/handlers/history.py
@@ -4,7 +4,7 @@
from datalad_service.common.git import git_tag


class HistoryResource(object):
class HistoryResource:
def __init__(self, store):
self.store = store

4 changes: 2 additions & 2 deletions services/datalad/datalad_service/handlers/objects.py
@@ -5,7 +5,7 @@
from datalad_service.common.git import git_show_object


class ObjectsResource(object):
class ObjectsResource:

def __init__(self, store):
self.store = store
@@ -31,4 +31,4 @@ def on_get(self, req, resp, dataset, obj):
'error': 'an unknown error occurred accessing this file'}
resp.status = falcon.HTTP_INTERNAL_SERVER_ERROR
self.logger.exception(
'An unknown error processing object "{}"'.format(obj))
f'An unknown error processing object "{obj}"')
2 changes: 1 addition & 1 deletion services/datalad/datalad_service/handlers/publish.py
@@ -4,7 +4,7 @@
from datalad_service.tasks.publish import create_remotes_and_export


class PublishResource(object):
class PublishResource:

"""A Falcon API wrapper around underlying datalad/git-annex datasets."""

2 changes: 1 addition & 1 deletion services/datalad/datalad_service/handlers/reexporter.py
@@ -5,7 +5,7 @@
from datalad_service.tasks.publish import export_dataset


class ReexporterResource(object):
class ReexporterResource:
def __init__(self, store):
self.store = store
self.logger = logging.getLogger('datalad_service.' + __name__)
2 changes: 1 addition & 1 deletion services/datalad/datalad_service/handlers/remote_import.py
@@ -6,7 +6,7 @@
from datalad_service.common.user import get_user_info


class RemoteImportResource(object):
class RemoteImportResource:
def __init__(self, store):
self.store = store
self.logger = logging.getLogger('datalad_service.' + __name__)
2 changes: 1 addition & 1 deletion services/datalad/datalad_service/handlers/reset.py
@@ -5,7 +5,7 @@
from datalad_service.tasks.files import commit_files


class ResetResource(object):
class ResetResource:
def __init__(self, store):
self.store = store

2 changes: 1 addition & 1 deletion services/datalad/datalad_service/handlers/snapshots.py
@@ -10,7 +10,7 @@
from datalad_service.common.git import delete_tag


class SnapshotResource(object):
class SnapshotResource:

"""Snapshots on top of DataLad datasets."""

2 changes: 1 addition & 1 deletion services/datalad/datalad_service/handlers/tree.py
@@ -6,7 +6,7 @@
from datalad_service.tasks.files import get_tree


class TreeResource(object):
class TreeResource:
def __init__(self, store):
self.store = store
self.logger = logging.getLogger('datalad_service.' + __name__)
2 changes: 1 addition & 1 deletion services/datalad/datalad_service/handlers/upload.py
@@ -34,7 +34,7 @@ def move_files_into_repo(dataset_id, dataset_path, upload_path, name, email, coo
update_head(dataset_id, dataset_path, hexsha, cookies)


class UploadResource(object):
class UploadResource:
def __init__(self, store):
self.store = store
self.logger = logging.getLogger('datalad_service.' + __name__)
2 changes: 1 addition & 1 deletion services/datalad/datalad_service/handlers/validation.py
@@ -4,7 +4,7 @@
from datalad_service.tasks.validator import validate_dataset


class ValidationResource(object):
class ValidationResource:
def __init__(self, store):
self.store = store

2 changes: 1 addition & 1 deletion services/datalad/datalad_service/middleware/auth.py
@@ -18,7 +18,7 @@ def parse_authorization_header(authorization):
return None


class AuthenticateMiddleware(object):
class AuthenticateMiddleware:
def process_request(self, req, resp):
"""Process the request before routing it for authentication.

2 changes: 1 addition & 1 deletion services/datalad/datalad_service/tasks/audit.py
@@ -13,7 +13,7 @@
def fsck_remote(dataset_path, remote):
"""Run fsck for one dataset remote"""
# Run at most once per month per dataset
annex_command = ("git-annex", "fsck", "--all", "--from={}".format(remote), "--fast", "--json",
annex_command = ("git-annex", "fsck", "--all", f"--from={remote}", "--fast", "--json",

Codecov warning: added line services/datalad/datalad_service/tasks/audit.py#L16 was not covered by tests.
"--json-error-messages", "--incremental", "--incremental-schedule=30d", "--time-limit=15m")
annex_process = subprocess.Popen(
annex_command, cwd=dataset_path, stdout=subprocess.PIPE, encoding='utf-8')
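
fsck_remote streams git-annex's --json output, one JSON record per line, from the Popen pipe. A hedged sketch of consuming that stream is shown below; the field names ('success', 'key', 'error-messages') follow git-annex's JSON conventions but should be treated as assumptions as far as this diff is concerned.

import json

def iter_fsck_failures(annex_process):
    # Lazily yield (key, messages) for objects that failed the fsck pass.
    for line in annex_process.stdout:
        record = json.loads(line)
        if not record.get('success', True):
            yield record.get('key'), record.get('error-messages', [])
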
6 changes: 3 additions & 3 deletions services/datalad/datalad_service/tasks/snapshots.py
@@ -27,14 +27,14 @@ def get_snapshot(store, dataset, snapshot):
hexsha = commit.hex
created = commit.commit_time
tree = commit.tree_id.hex
return {'id': '{}:{}'.format(dataset, snapshot), 'tag': snapshot, 'hexsha': hexsha, 'created': created, 'tree': tree}
return {'id': f'{dataset}:{snapshot}', 'tag': snapshot, 'hexsha': hexsha, 'created': created, 'tree': tree}


def get_snapshots(store, dataset):
path = store.get_dataset_path(dataset)
repo_tags = git_tag(pygit2.Repository(path))
# Include an extra id field to uniquely identify snapshots
tags = [{'id': '{}:{}'.format(dataset, tag.shorthand), 'tag': tag.shorthand, 'hexsha': tag.target.hex, 'created': tag.peel().commit_time}
tags = [{'id': f'{dataset}:{tag.shorthand}', 'tag': tag.shorthand, 'hexsha': tag.target.hex, 'created': tag.peel().commit_time}
for tag in repo_tags]
return tags

@@ -131,7 +131,7 @@ def validate_snapshot_name(store, dataset, snapshot):
tagged = [tag for tag in tags if tag.name == snapshot]
if tagged:
raise SnapshotExistsException(
'Tag "{}" already exists, name conflict'.format(snapshot))
f'Tag "{snapshot}" already exists, name conflict')


def validate_datalad_config(store, dataset):
4 changes: 2 additions & 2 deletions services/datalad/tests/test_annex_handler.py
@@ -20,7 +20,7 @@ def test_key_add_remove(client):
ds_id = 'ds000001'
key = "MD5E-s24--e04bb0391ed06622b018aac26c736870.nii"
random_value = b"ooQuieshaz4of2Aip3Niec2e"
url = '/git/0/{}/annex/{}'.format(ds_id, key)
url = f'/git/0/{ds_id}/annex/{key}'
response = client.simulate_post(
url, headers={"authorization": test_auth}, body=random_value)
assert response.status == falcon.HTTP_OK
@@ -33,7 +33,7 @@ def test_key_get_head(client):
ds_id = 'ds000001'
key = "MD5E-s24--00e7097e83570b24b69cc509fc8f3cbf.nii"
random_value = b"soo2aid1po5teiJoowufah4i"
url = '/git/0/{}/annex/{}'.format(ds_id, key)
url = f'/git/0/{ds_id}/annex/{key}'
# Test failure first
response = client.simulate_head(url, headers={"authorization": test_auth})
assert response.status == falcon.HTTP_NOT_FOUND