Remove legacy library interface #4908

Merged: 35 commits into dev from remove_old_library, Nov 13, 2017

Commits
3097fd4  Remove library makos (guerler, Oct 30, 2017)
9ff9f4b  Remove functions from controller (guerler, Oct 31, 2017)
fe5983a  Remove library common controller (guerler, Nov 1, 2017)
a7a5141  Add create folder from controller to api (guerler, Nov 1, 2017)
db3bb5f  Move checks from controller, possible bug found (guerler, Nov 1, 2017)
b88cbae  Remove mako related controller options (guerler, Nov 1, 2017)
ab432bf  Move api calls to legacy controller to api endpoint (guerler, Nov 1, 2017)
e7ec08b  Fix caller (guerler, Nov 1, 2017)
c7ca7a6  Remove unused variables (guerler, Nov 1, 2017)
b1ccba3  Remove unused imports (guerler, Nov 1, 2017)
72ba00e  Remove unused variables (guerler, Nov 1, 2017)
345cbf6  Merge branch 'dev' into remove_old_library (guerler, Nov 1, 2017)
30e0be5  Merge branch 'dev' into remove_old_library (guerler, Nov 1, 2017)
0ea7720  Fix import order (guerler, Nov 1, 2017)
b17eea8  Merge branch 'dev' into remove_old_library (guerler, Nov 1, 2017)
6c96130  Fix order (guerler, Nov 1, 2017)
927ff65  Merge branch 'dev' into remove_old_library (guerler, Nov 2, 2017)
ebd6ee5  Merge branch 'dev' into remove_old_library (guerler, Nov 3, 2017)
6650e94  Merge branch 'dev' into remove_old_library (guerler, Nov 4, 2017)
5758f79  Remove info statement for legacy data library interface (guerler, Nov 5, 2017)
890197c  Remove explicit controller name parsing for api (guerler, Nov 6, 2017)
d0a80fa  Remove unused whoosh indexing config options (guerler, Nov 7, 2017)
dd0649c  Remove from galaxy.ini (guerler, Nov 7, 2017)
e1b167b  Merge branch 'dev' into remove_old_library (guerler, Nov 7, 2017)
8422feb  Remove whoosh from directory creation method (guerler, Nov 7, 2017)
4b30e84  Remove other unused library config options (guerler, Nov 7, 2017)
991ee7d  Merge branch 'dev' into remove_old_library (guerler, Nov 7, 2017)
c9ab13f  Move shared actions to separate file (guerler, Nov 9, 2017)
ed4b004  Fix import order, remove unused libraries (guerler, Nov 9, 2017)
7e372c9  Merge branch 'dev' into remove_old_library (guerler, Nov 9, 2017)
ff7b29c  Use underscores for private functions consistently (guerler, Nov 9, 2017)
29944cf  Merge branch 'dev' into remove_old_library (guerler, Nov 10, 2017)
063aec3  Fix comment (guerler, Nov 10, 2017)
fc9059a  Restore database/info.txt. (jmchilton, Sep 1, 2016)
15852fc  Merge branch 'dev' into remove_old_library (martenson, Nov 13, 2017)
4 changes: 0 additions & 4 deletions client/galaxy/scripts/apps/panels/admin-panel.js
@@ -24,10 +24,6 @@ var AdminPanel = Backbone.View.extend({
             title: "Data tables",
             url: "admin/view_tool_data_tables"
         },
-        {
-            title: "Data libraries",
-            url: "library_admin/browse_libraries"
-        },
         {
             title: "Display applications",
             url: "admin/display_applications"
13 changes: 0 additions & 13 deletions config/galaxy.ini.sample
@@ -951,19 +951,6 @@ use_interactive = True
 # communicate with this manager over the port specified here.
 #transfer_manager_port = 8163
 
-# Search data libraries with whoosh
-#enable_whoosh_library_search = True
-# Whoosh indexes are stored in this directory.
-#whoosh_index_dir = database/whoosh_indexes
-
-# Search data libraries with lucene
-#enable_lucene_library_search = False
-# maximum file size to index for searching, in MB
-#fulltext_max_size = 500
-#fulltext_noindex_filetypes = bam,sam,wig,bigwig,fasta,fastq,fastqsolexa,fastqillumina,fastqsanger
-# base URL of server providing search functionality using lucene
-#fulltext_url = http://localhost:8081
-
 # -- Toolbox Search
 
 # The following boosts are used to customize this instance's toolbox search.
264 changes: 264 additions & 0 deletions lib/galaxy/actions/library.py
@@ -0,0 +1,264 @@
"""
Contains library functions
"""
import json
import logging
import os.path
from markupsafe import escape
from galaxy import util
from galaxy.tools.actions import upload_common
from galaxy.tools.parameters import populate_state
from galaxy.util.path import (
safe_contains,
safe_relpath,
unsafe_walk
)

log = logging.getLogger(__name__)


class LibraryActions(object):
"""
Mixin for controllers that provide library functionality.
"""

def _upload_dataset(self, trans, library_id, folder_id, replace_dataset=None, **kwd):
# Set up the traditional tool state/params
cntrller = 'api'
tool_id = 'upload1'
message = None
tool = trans.app.toolbox.get_tool(tool_id)
state = tool.new_state(trans)
populate_state(trans, tool.inputs, kwd, state.inputs)
tool_params = state.inputs
dataset_upload_inputs = []
for input_name, input in tool.inputs.items():
if input.type == "upload_dataset":
dataset_upload_inputs.append(input)
# Library-specific params
server_dir = kwd.get('server_dir', '')
upload_option = kwd.get('upload_option', 'upload_file')
response_code = 200
if upload_option == 'upload_directory':
if server_dir in [None, 'None', '']:
response_code = 400
if trans.user_is_admin():
import_dir = trans.app.config.library_import_dir
import_dir_desc = 'library_import_dir'
else:
import_dir = trans.app.config.user_library_import_dir
if server_dir != trans.user.email:
import_dir = os.path.join(import_dir, trans.user.email)
import_dir_desc = 'user_library_import_dir'
full_dir = os.path.join(import_dir, server_dir)
unsafe = None
if safe_relpath(server_dir):
if import_dir_desc == 'user_library_import_dir' and safe_contains(import_dir, full_dir, whitelist=trans.app.config.user_library_import_symlink_whitelist):
for unsafe in unsafe_walk(full_dir, whitelist=[import_dir] + trans.app.config.user_library_import_symlink_whitelist):
log.error('User attempted to import a path that resolves to a path outside of their import dir: %s -> %s', unsafe, os.path.realpath(unsafe))
else:
log.error('User attempted to import a directory path that resolves to a path outside of their import dir: %s -> %s', server_dir, os.path.realpath(full_dir))
unsafe = True
if unsafe:
response_code = 403
message = 'Invalid server_dir'
if import_dir:
message = 'Select a directory'
else:
response_code = 403
message = '"%s" is not defined in the Galaxy configuration file' % import_dir_desc
elif upload_option == 'upload_paths':
if not trans.app.config.allow_library_path_paste:
response_code = 403
message = '"allow_library_path_paste" is not defined in the Galaxy configuration file'
# Some error handling should be added to this method.
try:
# FIXME: instead of passing params here ( which have been processed by util.Params(), the original kwd
# should be passed so that complex objects that may have been included in the initial request remain.
library_bunch = upload_common.handle_library_params(trans, kwd, folder_id, replace_dataset)
except Exception:
response_code = 500
message = "Unable to parse upload parameters, please report this error."
# Proceed with (mostly) regular upload processing if we're still errorless
if response_code == 200:
precreated_datasets = upload_common.get_precreated_datasets(trans, tool_params, trans.app.model.LibraryDatasetDatasetAssociation, controller=cntrller)
if upload_option == 'upload_file':
tool_params = upload_common.persist_uploads(tool_params, trans)
uploaded_datasets = upload_common.get_uploaded_datasets(trans, cntrller, tool_params, precreated_datasets, dataset_upload_inputs, library_bunch=library_bunch)
elif upload_option == 'upload_directory':
uploaded_datasets, response_code, message = self._get_server_dir_uploaded_datasets(trans, kwd, full_dir, import_dir_desc, library_bunch, response_code, message)
elif upload_option == 'upload_paths':
uploaded_datasets, response_code, message = self._get_path_paste_uploaded_datasets(trans, kwd, library_bunch, response_code, message)
upload_common.cleanup_unused_precreated_datasets(precreated_datasets)
if upload_option == 'upload_file' and not uploaded_datasets:
response_code = 400
message = 'Select a file, enter a URL or enter text'
if response_code != 200:
return (response_code, message)
json_file_path = upload_common.create_paramfile(trans, uploaded_datasets)
data_list = [ud.data for ud in uploaded_datasets]
job_params = {}
job_params['link_data_only'] = json.dumps(kwd.get('link_data_only', 'copy_files'))
job_params['uuid'] = json.dumps(kwd.get('uuid', None))
job, output = upload_common.create_job(trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder, job_params=job_params)
trans.sa_session.add(job)
trans.sa_session.flush()
return output
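
    # The upload_option values handled above map onto the helper methods below:
    #   'upload_file'      -> upload_common.persist_uploads / get_uploaded_datasets
    #   'upload_directory' -> _get_server_dir_uploaded_datasets (server import dirs)
    #   'upload_paths'     -> _get_path_paste_uploaded_datasets (gated by the
    #                         allow_library_path_paste config option)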

    def _get_server_dir_uploaded_datasets(self, trans, params, full_dir, import_dir_desc, library_bunch, response_code, message):
        dir_response = self._get_server_dir_files(params, full_dir, import_dir_desc)
        files = dir_response[0]
        if not files:
            return dir_response
        uploaded_datasets = []
        for file in files:
            name = os.path.basename(file)
            uploaded_datasets.append(self._make_library_uploaded_dataset(trans, params, name, file, 'server_dir', library_bunch))
        return uploaded_datasets, 200, None

    def _get_path_paste_uploaded_datasets(self, trans, params, library_bunch, response_code, message):
        preserve_dirs = util.string_as_bool(params.get('preserve_dirs', False))
        uploaded_datasets = []
        (files_and_folders, _response_code, _message) = self._get_path_files_and_folders(params, preserve_dirs)
        if _response_code:
            return (uploaded_datasets, _response_code, _message)
        for (path, name, folder) in files_and_folders:
            uploaded_datasets.append(self._make_library_uploaded_dataset(trans, params, name, path, 'path_paste', library_bunch, folder))
        return uploaded_datasets, 200, None

    def _get_path_files_and_folders(self, params, preserve_dirs):
        problem_response = self._check_path_paste_params(params)
        if problem_response:
            return problem_response
        files_and_folders = []
        for (line, path) in self._paths_list(params):
            line_files_and_folders = self._get_single_path_files_and_folders(line, path, preserve_dirs)
            files_and_folders.extend(line_files_and_folders)
        return files_and_folders, None, None

    def _get_single_path_files_and_folders(self, line, path, preserve_dirs):
        files_and_folders = []
        if os.path.isfile(path):
            name = os.path.basename(path)
            files_and_folders.append((path, name, None))
        for basedir, dirs, files in os.walk(line):
            for file in files:
                file_path = os.path.abspath(os.path.join(basedir, file))
                if preserve_dirs:
                    in_folder = os.path.dirname(file_path.replace(path, '', 1).lstrip('/'))
                else:
                    in_folder = None
                files_and_folders.append((file_path, file, in_folder))
        return files_and_folders

    def _paths_list(self, params):
        return [(l.strip(), os.path.abspath(l.strip())) for l in params.get('filesystem_paths', '').splitlines() if l.strip()]

    def _check_path_paste_params(self, params):
        if params.get('filesystem_paths', '') == '':
            message = "No paths entered in the upload form"
            response_code = 400
            return None, response_code, message
        bad_paths = []
        for (_, path) in self._paths_list(params):
            if not os.path.exists(path):
                bad_paths.append(path)
        if bad_paths:
            message = 'Invalid paths: "%s".' % '", "'.join(bad_paths)
            response_code = 400
            return None, response_code, message
        return None

    def _make_library_uploaded_dataset(self, trans, params, name, path, type, library_bunch, in_folder=None):
        link_data_only = params.get('link_data_only', 'copy_files')
        uuid_str = params.get('uuid', None)
        file_type = params.get('file_type', None)
        library_bunch.replace_dataset = None  # not valid for these types of upload
        uploaded_dataset = util.bunch.Bunch()
        new_name = name
        # Remove compressed file extensions, if any, but only if
        # we're copying files into Galaxy's file space.
        if link_data_only == 'copy_files':
            if new_name.endswith('.gz'):
                new_name = new_name.rstrip('.gz')
            elif new_name.endswith('.zip'):
                new_name = new_name.rstrip('.zip')
        uploaded_dataset.name = new_name
        uploaded_dataset.path = path
        uploaded_dataset.type = type
        uploaded_dataset.ext = None
        uploaded_dataset.file_type = file_type
        uploaded_dataset.dbkey = params.get('dbkey', None)
        uploaded_dataset.to_posix_lines = params.get('to_posix_lines', None)
        uploaded_dataset.space_to_tab = params.get('space_to_tab', None)
        uploaded_dataset.tag_using_filenames = params.get('tag_using_filenames', True)
        if in_folder:
            uploaded_dataset.in_folder = in_folder
        uploaded_dataset.data = upload_common.new_upload(trans, 'api', uploaded_dataset, library_bunch)
        uploaded_dataset.link_data_only = link_data_only
        uploaded_dataset.uuid = uuid_str
        if link_data_only == 'link_to_files':
            uploaded_dataset.data.file_name = os.path.abspath(path)
            # Since we are not copying the file into Galaxy's managed
            # default file location, the dataset should never be purgable.
            uploaded_dataset.data.dataset.purgable = False
            trans.sa_session.add_all((uploaded_dataset.data, uploaded_dataset.data.dataset))
            trans.sa_session.flush()
        return uploaded_dataset

    def _create_folder(self, trans, parent_id, library_id, **kwd):
        is_admin = trans.user_is_admin()
        current_user_roles = trans.get_current_user_roles()
        try:
            parent_folder = trans.sa_session.query(trans.app.model.LibraryFolder).get(trans.security.decode_id(parent_id))
        except Exception:
            parent_folder = None
        # Check the library which actually contains the user-supplied parent folder, not the user-supplied
        # library, which could be anything.
        self._check_access(trans, is_admin, parent_folder, current_user_roles)
        self._check_add(trans, is_admin, parent_folder, current_user_roles)
        new_folder = trans.app.model.LibraryFolder(name=kwd.get('name', ''),
                                                   description=kwd.get('description', ''))
        # We are associating the last used genome build with folders, so we will always
        # initialize a new folder with the first dbkey in genome builds list which is currently
        # ? unspecified (?)
        new_folder.genome_build = trans.app.genome_builds.default_value
        parent_folder.add_folder(new_folder)
        trans.sa_session.add(new_folder)
        trans.sa_session.flush()
        # New folders default to having the same permissions as their parent folder
        trans.app.security_agent.copy_library_permissions(trans, parent_folder, new_folder)
        return 200, dict(created=new_folder)

    def _check_access(self, trans, is_admin, item, current_user_roles):
        can_access = True
        if isinstance(item, trans.model.HistoryDatasetAssociation):
            # Make sure the user has the DATASET_ACCESS permission on the history_dataset_association.
            if not item:
                message = "Invalid history dataset (%s) specified." % escape(str(item))
                can_access = False
            elif not trans.app.security_agent.can_access_dataset(current_user_roles, item.dataset) and item.history.user == trans.user:
                message = "You do not have permission to access the history dataset with id (%s)." % str(item.id)
                can_access = False
        else:
            # Make sure the user has the LIBRARY_ACCESS permission on the library item.
            if not item:
                message = "Invalid library item (%s) specified." % escape(str(item))
                can_access = False
            elif not (is_admin or trans.app.security_agent.can_access_library_item(current_user_roles, item, trans.user)):
                if isinstance(item, trans.model.Library):
                    item_type = 'data library'
                elif isinstance(item, trans.model.LibraryFolder):
                    item_type = 'folder'
                else:
                    item_type = '(unknown item type)'
                message = "You do not have permission to access the %s with id (%s)." % (escape(item_type), str(item.id))
                can_access = False
        if not can_access:
            return 400, message

    def _check_add(self, trans, is_admin, item, current_user_roles):
        # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission.
        if not (is_admin or trans.app.security_agent.can_add_library_item(current_user_roles, item)):
            message = "You are not authorized to add an item to (%s)." % escape(item.name)
            return 403, message
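
For orientation, here is a minimal sketch (not part of this PR) of how an API controller might consume the mixin above. The controller class, method, and payload wiring are hypothetical; LibraryActions, _create_folder, and its (status, result) return value come from the diff.

from galaxy.actions.library import LibraryActions


class ExampleFoldersController(LibraryActions):
    # Hypothetical consumer; real Galaxy API controllers also inherit
    # BaseAPIController and do proper error handling and serialization.

    def create(self, trans, encoded_parent_id, encoded_library_id, **payload):
        # _create_folder runs the access/add permission checks, persists the
        # new LibraryFolder, and copies the parent folder's permissions.
        status, result = self._create_folder(trans, encoded_parent_id, encoded_library_id,
                                             name=payload.get('name', ''),
                                             description=payload.get('description', ''))
        # On success it returns (200, dict(created=new_folder)); the other
        # helpers (_check_access, _check_add, _upload_dataset) instead return
        # a (status, message) tuple that a caller can propagate on failure.
        return result['created']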
6 changes: 1 addition & 5 deletions lib/galaxy/config.py
@@ -403,9 +403,6 @@ def __init__(self, **kwargs):
         self.user_library_import_dir = kwargs.get('user_library_import_dir', None)
         self.user_library_import_symlink_whitelist = listify(kwargs.get('user_library_import_symlink_whitelist', []), do_strip=True)
         # Searching data libraries
-        self.enable_lucene_library_search = string_as_bool(kwargs.get('enable_lucene_library_search', False))
-        self.enable_whoosh_library_search = string_as_bool(kwargs.get('enable_whoosh_library_search', False))
-        self.whoosh_index_dir = resolve_path(kwargs.get("whoosh_index_dir", "database/whoosh_indexes"), self.root)
         self.ftp_upload_dir = kwargs.get('ftp_upload_dir', None)
         self.ftp_upload_dir_identifier = kwargs.get('ftp_upload_dir_identifier', 'email')  # attribute on user - email, username, id, etc...
         self.ftp_upload_dir_template = kwargs.get('ftp_upload_dir_template', '${ftp_upload_dir}%s${ftp_upload_dir_identifier}' % os.path.sep)
@@ -759,8 +756,7 @@ def check(self):
         # Create the directories that it makes sense to create
         for path in (self.new_file_path, self.template_cache, self.ftp_upload_dir,
                      self.library_import_dir, self.user_library_import_dir,
-                     self.nginx_upload_store, self.whoosh_index_dir,
-                     self.object_store_cache_path):
+                     self.nginx_upload_store, self.object_store_cache_path):
             self._ensure_directory(path)
         # Check that required files exist
         tool_configs = self.tool_configs
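
As a side note on the surviving config lines: the sketch below uses simplified stand-ins for galaxy.util.string_as_bool and galaxy.util.listify (the real helpers cover more cases) to illustrate how Config.__init__ coerces raw ini kwargs such as user_library_import_symlink_whitelist.

# Simplified stand-ins, shown only to illustrate the coercion pattern
# used in Config.__init__ above; not the Galaxy originals.

def string_as_bool(value):
    # Values read from galaxy.ini arrive as strings ('True', 'false', ...).
    return str(value).lower() in ('true', 'yes', 'on')


def listify(value, do_strip=False):
    # Turn a comma-separated ini value into a list, optionally stripping
    # whitespace around each item.
    if not value:
        return []
    items = str(value).split(',')
    return [item.strip() for item in items] if do_strip else items


kwargs = {'user_library_import_symlink_whitelist': '/data/shared, /data/archive'}
whitelist = listify(kwargs.get('user_library_import_symlink_whitelist', []), do_strip=True)
assert whitelist == ['/data/shared', '/data/archive']
assert string_as_bool(kwargs.get('allow_library_path_paste', 'False')) is False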