From 6f86dc1add9234075bcf1c28181425ac4d1a9236 Mon Sep 17 00:00:00 2001 From: ymarcon Date: Mon, 9 Feb 2026 15:39:47 +0100 Subject: [PATCH 1/9] Fix E501 line length issues in console.py, data.py, dictionary.py, exports.py, and file.py --- .ruff.toml | 42 +++ obiba_opal/console.py | 675 ++++++++++++++++++++++++++++++--------- obiba_opal/data.py | 154 ++++++--- obiba_opal/dictionary.py | 359 ++++++++++++++++----- obiba_opal/exports.py | 648 ++++++++++++++++++++++++++++--------- obiba_opal/file.py | 67 ++-- 6 files changed, 1498 insertions(+), 447 deletions(-) create mode 100644 .ruff.toml diff --git a/.ruff.toml b/.ruff.toml new file mode 100644 index 0000000..e730c4d --- /dev/null +++ b/.ruff.toml @@ -0,0 +1,42 @@ +# Ruff configuration file + +# Exclude a variety of commonly ignored directories. +extend-exclude = [ + "__pycache__", + ".git", + ".venv", + ".eggs", + ".nox", + ".tox", + ".svn", + ".hg", + "build", + "dist", + ".mypy_cache", + ".pytest_cache", +] + +# Assume Python 3.10. +target-version = "py310" + +# Same as Black. +line-length = 88 + +[lint] +# Enable flake8-bugbear rules +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "B", # flake8-bugbear + "C4", # flake8-comprehensions + "UP", # pyupgrade + "SIM", # flake8-simplify +] + +# Allow autofix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] + + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" \ No newline at end of file diff --git a/obiba_opal/console.py b/obiba_opal/console.py index 3034b99..0dfab03 100755 --- a/obiba_opal/console.py +++ b/obiba_opal/console.py @@ -6,191 +6,574 @@ import getpass from obiba_opal.core import Formatter, HTTPError -from obiba_opal.project import ProjectService, BackupProjectCommand, RestoreProjectCommand -from obiba_opal.table import CopyTableCommand, DeleteTableService, BackupViewService, RestoreViewService -from obiba_opal.dictionary import DictionaryService, ExportAnnotationsService, ImportAnnotationsService +from obiba_opal.project import ( + ProjectService, + BackupProjectCommand, + RestoreProjectCommand, +) +from obiba_opal.table import ( + CopyTableCommand, + DeleteTableService, + BackupViewService, + RestoreViewService, +) +from obiba_opal.dictionary import ( + DictionaryService, + ExportAnnotationsService, + ImportAnnotationsService, +) from obiba_opal.data import DataService, EntityService from obiba_opal.analysis import AnalysisCommand, ExportAnalysisService from obiba_opal.file import FileService -from obiba_opal.exports import ExportPluginCommand, ExportCSVCommand, ExportXMLCommand, ExportRSASCommand, ExportRSPSSCommand, ExportRSTATACommand, ExportRDSCommand, ExportSQLCommand, ExportVCFCommand +from obiba_opal.exports import ( + ExportPluginCommand, + ExportCSVCommand, + ExportXMLCommand, + ExportRSASCommand, + ExportRSPSSCommand, + ExportRSTATACommand, + ExportRDSCommand, + ExportSQLCommand, + ExportVCFCommand, +) from obiba_opal.subjects import UserService, GroupService -from obiba_opal.perm import ProjectPermService, DatasourcePermService, TablePermService, VariablePermService, ResourcePermService, ResourcesPermService, RPermService, DataSHIELDPermService, SystemPermService -from obiba_opal.imports import ImportPluginCommand, ImportCSVCommand, ImportIDMapService, ImportIDService, ImportLimeSurveyCommand, ImportOpalCommand, ImportRDSCommand, ImportRSASCommand, ImportRSPSSCommand, ImportRSTATACommand, ImportSQLCommand, ImportVCFCommand, ImportXMLCommand -from 
obiba_opal.system import PluginService, SystemService, TaxonomyService, TaskService, RESTService +from obiba_opal.perm import ( + ProjectPermService, + DatasourcePermService, + TablePermService, + VariablePermService, + ResourcePermService, + ResourcesPermService, + RPermService, + DataSHIELDPermService, + SystemPermService, +) +from obiba_opal.imports import ( + ImportPluginCommand, + ImportCSVCommand, + ImportIDMapService, + ImportIDService, + ImportLimeSurveyCommand, + ImportOpalCommand, + ImportRDSCommand, + ImportRSASCommand, + ImportRSPSSCommand, + ImportRSTATACommand, + ImportSQLCommand, + ImportVCFCommand, + ImportXMLCommand, +) +from obiba_opal.system import ( + PluginService, + SystemService, + TaxonomyService, + TaskService, + RESTService, +) from obiba_opal.sql import SQLService, SQLHistoryService from obiba_opal.security import EncryptService, DecryptService + def prompt_password(): - return getpass.getpass(prompt='Enter password: ') + return getpass.getpass(prompt="Enter password: ") + def add_opal_arguments(parser): """ Add Opal access arguments """ - parser.add_argument('--opal', '-o', required=False, default='http://localhost:8080', - help='Opal server base url (default: http://localhost:8080)') - parser.add_argument('--user', '-u', required=False, help='Credentials auth: user name (requires a password)') - parser.add_argument('--password', '-p', required=False, nargs="?", - help='Credentials auth: user password (requires a user name)') - parser.add_argument('--token', '-tk', required=False, help='Token auth: User access token') - parser.add_argument('--ssl-cert', '-sc', required=False, - help='Two-way SSL auth: certificate/public key file (requires a private key)') - parser.add_argument('--ssl-key', '-sk', required=False, - help='Two-way SSL auth: private key file (requires a certificate)') - parser.add_argument('--verbose', '-v', action='store_true', help='Verbose output') - parser.add_argument('--no-ssl-verify', '-nv', action='store_true', help='Do not verify SSL certificates for HTTPS.') + parser.add_argument( + "--opal", + "-o", + required=False, + default="http://localhost:8080", + help="Opal server base url (default: http://localhost:8080)", + ) + parser.add_argument( + "--user", + "-u", + required=False, + help="Credentials auth: user name (requires a password)", + ) + parser.add_argument( + "--password", + "-p", + required=False, + nargs="?", + help="Credentials auth: user password (requires a user name)", + ) + parser.add_argument( + "--token", "-tk", required=False, help="Token auth: User access token" + ) + parser.add_argument( + "--ssl-cert", + "-sc", + required=False, + help="Two-way SSL auth: certificate/public key file (requires a private key)", + ) + parser.add_argument( + "--ssl-key", + "-sk", + required=False, + help="Two-way SSL auth: private key file (requires a certificate)", + ) + parser.add_argument("--verbose", "-v", action="store_true", help="Verbose output") + parser.add_argument( + "--no-ssl-verify", + "-nv", + action="store_true", + help="Do not verify SSL certificates for HTTPS.", + ) + def add_subcommand(subparsers, name, help, add_args_func, default_func): """ - Make a sub-parser, add default arguments to it, add sub-command arguments and set the sub-command callback function. + Make a sub-parser, add default arguments to it, add sub-command arguments + and set the sub-command callback function. 
""" subparser = subparsers.add_parser(name, help=help) add_opal_arguments(subparser) add_args_func(subparser) subparser.set_defaults(func=default_func) + def run(): """ Command-line entry point. """ # Parse arguments - parser = argparse.ArgumentParser(description='Opal command line tool.') - subparsers = parser.add_subparsers(title='sub-commands', - help='Available sub-commands. Use --help option on the sub-command ' - 'for more details.') + parser = argparse.ArgumentParser(description="Opal command line tool.") + subparsers = parser.add_subparsers( + title="sub-commands", + help="Available sub-commands. Use --help option on the sub-command " + "for more details.", + ) # Add subcommands - add_subcommand(subparsers, 'project', 'Fetch, create, delete a project.', ProjectService.add_arguments, ProjectService.do_command) - add_subcommand(subparsers, 'dict', 'Query for data dictionary.', DictionaryService.add_arguments, DictionaryService.do_command) - add_subcommand(subparsers, 'data', 'Query for data.', DataService.add_arguments, DataService.do_command) - add_subcommand(subparsers, 'entity', 'Query for entities (Participant, etc.).', EntityService.add_arguments, EntityService.do_command) - add_subcommand(subparsers, 'file', 'Manage Opal file system.', FileService.add_arguments, FileService.do_command) - add_subcommand(subparsers, 'taxonomy', 'Manage taxonomies: list available taxonomies, download, import or delete a taxonomy.', TaxonomyService.add_arguments, TaxonomyService.do_command) - add_subcommand(subparsers, 'backup-project', - 'Backup project data: tables (data export), views, resources, report templates, files.', - BackupProjectCommand.add_arguments, BackupProjectCommand.do_command) - add_subcommand(subparsers, 'restore-project', - 'Restore project data: tables (data import), views, resources, report templates, files.', - RestoreProjectCommand.add_arguments, RestoreProjectCommand.do_command) - add_subcommand(subparsers, 'backup-view', 'Backup views of a project.', BackupViewService.add_arguments, BackupViewService.do_command) - add_subcommand(subparsers, 'restore-view', 'Restore views of a project.', RestoreViewService.add_arguments, - RestoreViewService.do_command) - add_subcommand(subparsers, 'import-opal', 'Import data from a remote Opal server.', ImportOpalCommand.add_arguments, - ImportOpalCommand.do_command) - add_subcommand(subparsers, 'import-csv', 'Import data from a CSV file.', ImportCSVCommand.add_arguments, - ImportCSVCommand.do_command) - add_subcommand(subparsers, 'import-xml', 'Import data from a ZIP file.', ImportXMLCommand.add_arguments, - ImportXMLCommand.do_command) - add_subcommand(subparsers, 'import-r-sas', 'Import data from a SAS or SAS Transport file (using R).', - ImportRSASCommand.add_arguments, - ImportRSASCommand.do_command) - add_subcommand(subparsers, 'import-r-stata', 'Import data from a Stata file (using R).', ImportRSTATACommand.add_arguments, - ImportRSTATACommand.do_command) - add_subcommand(subparsers, 'import-r-spss', 'Import data from a SPSS or compressed SPSS file (using R).', - ImportRSPSSCommand.add_arguments, - ImportRSPSSCommand.do_command) - add_subcommand(subparsers, 'import-r-rds', 'Import data from a RDS file (single serialized R object, expected to be a tibble, using R).', ImportRDSCommand.add_arguments, - ImportRDSCommand.do_command) - add_subcommand(subparsers, 'import-plugin', 'Import data from an Opal datasource plugin.', ImportPluginCommand.add_arguments, - ImportPluginCommand.do_command) - add_subcommand(subparsers, 
'import-limesurvey', 'Import data from a LimeSurvey database.', ImportLimeSurveyCommand.add_arguments, - ImportLimeSurveyCommand.do_command) - add_subcommand(subparsers, 'import-sql', 'Import data from a SQL database.', ImportSQLCommand.add_arguments, - ImportSQLCommand.do_command) - add_subcommand(subparsers, 'import-vcf', 'Import genotypes data from some VCF/BCF files.', ImportVCFCommand.add_arguments, - ImportVCFCommand.do_command) - add_subcommand(subparsers, 'import-ids', 'Import system identifiers.', ImportIDService.add_arguments, - ImportIDService.do_command) - add_subcommand(subparsers, 'import-ids-map', 'Import identifiers mappings.', ImportIDMapService.add_arguments, - ImportIDMapService.do_command) - add_subcommand(subparsers, 'import-annot', - 'Apply data dictionary annotations specified in a file in CSV/TSV format (see export-annot).', - ImportAnnotationsService.add_arguments, ImportAnnotationsService.do_command) - add_subcommand(subparsers, 'export-xml', 'Export data to a zip of Opal XML files.', ExportXMLCommand.add_arguments, - ExportXMLCommand.do_command) - add_subcommand(subparsers, 'export-csv', 'Export data to a folder of CSV files.', ExportCSVCommand.add_arguments, - ExportCSVCommand.do_command) - add_subcommand(subparsers, 'export-r-sas', 'Export data to a SAS or SAS Transport file (using R).', ExportRSASCommand.add_arguments, - ExportRSASCommand.do_command) - add_subcommand(subparsers, 'export-r-stata', 'Export data to a Stata file (using R).', ExportRSTATACommand.add_arguments, - ExportRSTATACommand.do_command) - add_subcommand(subparsers, 'export-r-spss', 'Export data to a SPSS or compressed SPSS file (using R).', - ExportRSPSSCommand.add_arguments, - ExportRSPSSCommand.do_command) - add_subcommand(subparsers, 'export-r-rds', 'Export data to a RDS file (single serialized R object, using R).', ExportRDSCommand.add_arguments, - ExportRDSCommand.do_command) - add_subcommand(subparsers, 'export-sql', 'Export data to a SQL database.', ExportSQLCommand.add_arguments, - ExportSQLCommand.do_command) - add_subcommand(subparsers, 'export-plugin', 'Export data to a Opal datasource plugin.', ExportPluginCommand.add_arguments, - ExportPluginCommand.do_command) - add_subcommand(subparsers, 'export-vcf', 'Export genotypes data to VCF/BCF files.', ExportVCFCommand.add_arguments, - ExportVCFCommand.do_command) - add_subcommand(subparsers, 'export-annot', 'Extract data dictionary annotations in CSV/TSV format.', - ExportAnnotationsService.add_arguments, ExportAnnotationsService.do_command) - add_subcommand(subparsers, 'copy-table', 'Copy a table into another table.', CopyTableCommand.add_arguments, - CopyTableCommand.do_command) - add_subcommand(subparsers, 'delete-table', 'Delete some tables.', DeleteTableService.add_arguments, DeleteTableService.do_command) - add_subcommand(subparsers, 'user', 'Manage users.', UserService.add_arguments, UserService.do_command) - add_subcommand(subparsers, 'group', 'Manage groups.', GroupService.add_arguments, GroupService.do_command) - add_subcommand(subparsers, 'perm-project', 'Get or apply permission on a project.', ProjectPermService.add_arguments, - ProjectPermService.do_command) - add_subcommand(subparsers, 'perm-datasource', 'Get or apply permission on a datasource.', DatasourcePermService.add_arguments, - DatasourcePermService.do_command) - add_subcommand(subparsers, 'perm-table', 'Get or apply permission on a set of tables.', TablePermService.add_arguments, - TablePermService.do_command) - add_subcommand(subparsers, 'perm-variable', 'Get 
or apply permission on a set of variables.', VariablePermService.add_arguments, - VariablePermService.do_command) - add_subcommand(subparsers, 'perm-resources', 'Get or apply permission on resources as a whole.', ResourcesPermService.add_arguments, - ResourcesPermService.do_command) - add_subcommand(subparsers, 'perm-resource', 'Get or apply permission on a set of resources.', ResourcePermService.add_arguments, - ResourcePermService.do_command) - add_subcommand(subparsers, 'perm-r', 'Get or apply R permission.', RPermService.add_arguments, RPermService.do_command) - add_subcommand(subparsers, 'perm-datashield', 'Get or apply DataSHIELD permission.', DataSHIELDPermService.add_arguments, - DataSHIELDPermService.do_command) - add_subcommand(subparsers, 'perm-system', 'Get or apply system permission.', SystemPermService.add_arguments, SystemPermService.do_command) - add_subcommand(subparsers, 'plugin', 'Manage system plugins.', PluginService.add_arguments, - PluginService.do_command) - add_subcommand(subparsers, 'encrypt', "Encrypt string using Opal's secret key.", EncryptService.add_arguments, - EncryptService.do_command) - add_subcommand(subparsers, 'decrypt', "Decrypt string using Opal's secret key.", DecryptService.add_arguments, - DecryptService.do_command) - add_subcommand(subparsers, 'task', 'Manage a task.', TaskService.add_arguments, TaskService.do_command) - add_subcommand(subparsers, 'system', 'Query for system status and configuration.', SystemService.add_arguments, - SystemService.do_command) - add_subcommand(subparsers, 'rest', 'Request directly the Opal REST API, for advanced users.', RESTService.add_arguments, - RESTService.do_command) - add_subcommand(subparsers, 'analysis-plugin', 'Analyses a project variables using external R plugins.', - AnalysisCommand.add_arguments, - AnalysisCommand.do_command) - add_subcommand(subparsers, 'export-analysis-plugin', 'Exports analysis data of a project or specific tables.', - ExportAnalysisService.add_arguments, - ExportAnalysisService.do_command) - add_subcommand(subparsers, 'sql', 'Execute a SQL statement on project\'s tables.', - SQLService.add_arguments, - SQLService.do_command) - add_subcommand(subparsers, 'sql-history', 'SQL execution history of current user or of other users (administrator only).', - SQLHistoryService.add_arguments, - SQLHistoryService.do_command) + add_subcommand( + subparsers, + "project", + "Fetch, create, delete a project.", + ProjectService.add_arguments, + ProjectService.do_command, + ) + add_subcommand( + subparsers, + "dict", + "Query for data dictionary.", + DictionaryService.add_arguments, + DictionaryService.do_command, + ) + add_subcommand( + subparsers, + "data", + "Query for data.", + DataService.add_arguments, + DataService.do_command, + ) + add_subcommand( + subparsers, + "entity", + "Query for entities (Participant, etc.).", + EntityService.add_arguments, + EntityService.do_command, + ) + add_subcommand( + subparsers, + "file", + "Manage Opal file system.", + FileService.add_arguments, + FileService.do_command, + ) + add_subcommand( + subparsers, + "taxonomy", + "Manage taxonomies: list available taxonomies, download, import or " + "delete a taxonomy.", + TaxonomyService.add_arguments, + TaxonomyService.do_command, + ) + add_subcommand( + subparsers, + "backup-project", + "Backup project data: tables (data export), views, resources, report " + "templates, files.", + BackupProjectCommand.add_arguments, + BackupProjectCommand.do_command, + ) + add_subcommand( + subparsers, + "restore-project", + 
"Restore project data: tables (data import), views, resources, report " + "templates, files.", + RestoreProjectCommand.add_arguments, + RestoreProjectCommand.do_command, + ) + add_subcommand( + subparsers, + "backup-view", + "Backup views of a project.", + BackupViewService.add_arguments, + BackupViewService.do_command, + ) + add_subcommand( + subparsers, + "restore-view", + "Restore views of a project.", + RestoreViewService.add_arguments, + RestoreViewService.do_command, + ) + add_subcommand( + subparsers, + "import-opal", + "Import data from a remote Opal server.", + ImportOpalCommand.add_arguments, + ImportOpalCommand.do_command, + ) + add_subcommand( + subparsers, + "import-csv", + "Import data from a CSV file.", + ImportCSVCommand.add_arguments, + ImportCSVCommand.do_command, + ) + add_subcommand( + subparsers, + "import-xml", + "Import data from a ZIP file.", + ImportXMLCommand.add_arguments, + ImportXMLCommand.do_command, + ) + add_subcommand( + subparsers, + "import-r-sas", + "Import data from a SAS or SAS Transport file (using R).", + ImportRSASCommand.add_arguments, + ImportRSASCommand.do_command, + ) + add_subcommand( + subparsers, + "import-r-stata", + "Import data from a Stata file (using R).", + ImportRSTATACommand.add_arguments, + ImportRSTATACommand.do_command, + ) + add_subcommand( + subparsers, + "import-r-spss", + "Import data from a SPSS or compressed SPSS file (using R).", + ImportRSPSSCommand.add_arguments, + ImportRSPSSCommand.do_command, + ) + add_subcommand( + subparsers, + "import-r-rds", + "Import data from a RDS file (single serialized R object, expected to " + "be a tibble, using R).", + ImportRDSCommand.add_arguments, + ImportRDSCommand.do_command, + ) + add_subcommand( + subparsers, + "import-plugin", + "Import data from an Opal datasource plugin.", + ImportPluginCommand.add_arguments, + ImportPluginCommand.do_command, + ) + add_subcommand( + subparsers, + "import-limesurvey", + "Import data from a LimeSurvey database.", + ImportLimeSurveyCommand.add_arguments, + ImportLimeSurveyCommand.do_command, + ) + add_subcommand( + subparsers, + "import-sql", + "Import data from a SQL database.", + ImportSQLCommand.add_arguments, + ImportSQLCommand.do_command, + ) + add_subcommand( + subparsers, + "import-vcf", + "Import genotypes data from some VCF/BCF files.", + ImportVCFCommand.add_arguments, + ImportVCFCommand.do_command, + ) + add_subcommand( + subparsers, + "import-ids", + "Import system identifiers.", + ImportIDService.add_arguments, + ImportIDService.do_command, + ) + add_subcommand( + subparsers, + "import-ids-map", + "Import identifiers mappings.", + ImportIDMapService.add_arguments, + ImportIDMapService.do_command, + ) + add_subcommand( + subparsers, + "import-annot", + "Apply data dictionary annotations specified in a file in CSV/TSV " + "format (see export-annot).", + ImportAnnotationsService.add_arguments, + ImportAnnotationsService.do_command, + ) + add_subcommand( + subparsers, + "export-xml", + "Export data to a zip of Opal XML files.", + ExportXMLCommand.add_arguments, + ExportXMLCommand.do_command, + ) + add_subcommand( + subparsers, + "export-csv", + "Export data to a folder of CSV files.", + ExportCSVCommand.add_arguments, + ExportCSVCommand.do_command, + ) + add_subcommand( + subparsers, + "export-r-sas", + "Export data to a SAS or SAS Transport file (using R).", + ExportRSASCommand.add_arguments, + ExportRSASCommand.do_command, + ) + add_subcommand( + subparsers, + "export-r-stata", + "Export data to a Stata file (using R).", + 
ExportRSTATACommand.add_arguments, + ExportRSTATACommand.do_command, + ) + add_subcommand( + subparsers, + "export-r-spss", + "Export data to a SPSS or compressed SPSS file (using R).", + ExportRSPSSCommand.add_arguments, + ExportRSPSSCommand.do_command, + ) + add_subcommand( + subparsers, + "export-r-rds", + "Export data to a RDS file (single serialized R object, using R).", + ExportRDSCommand.add_arguments, + ExportRDSCommand.do_command, + ) + add_subcommand( + subparsers, + "export-sql", + "Export data to a SQL database.", + ExportSQLCommand.add_arguments, + ExportSQLCommand.do_command, + ) + add_subcommand( + subparsers, + "export-plugin", + "Export data to a Opal datasource plugin.", + ExportPluginCommand.add_arguments, + ExportPluginCommand.do_command, + ) + add_subcommand( + subparsers, + "export-vcf", + "Export genotypes data to VCF/BCF files.", + ExportVCFCommand.add_arguments, + ExportVCFCommand.do_command, + ) + add_subcommand( + subparsers, + "export-annot", + "Extract data dictionary annotations in CSV/TSV format.", + ExportAnnotationsService.add_arguments, + ExportAnnotationsService.do_command, + ) + add_subcommand( + subparsers, + "copy-table", + "Copy a table into another table.", + CopyTableCommand.add_arguments, + CopyTableCommand.do_command, + ) + add_subcommand( + subparsers, + "delete-table", + "Delete some tables.", + DeleteTableService.add_arguments, + DeleteTableService.do_command, + ) + add_subcommand( + subparsers, + "user", + "Manage users.", + UserService.add_arguments, + UserService.do_command, + ) + add_subcommand( + subparsers, + "group", + "Manage groups.", + GroupService.add_arguments, + GroupService.do_command, + ) + add_subcommand( + subparsers, + "perm-project", + "Get or apply permission on a project.", + ProjectPermService.add_arguments, + ProjectPermService.do_command, + ) + add_subcommand( + subparsers, + "perm-datasource", + "Get or apply permission on a datasource.", + DatasourcePermService.add_arguments, + DatasourcePermService.do_command, + ) + add_subcommand( + subparsers, + "perm-table", + "Get or apply permission on a set of tables.", + TablePermService.add_arguments, + TablePermService.do_command, + ) + add_subcommand( + subparsers, + "perm-variable", + "Get or apply permission on a set of variables.", + VariablePermService.add_arguments, + VariablePermService.do_command, + ) + add_subcommand( + subparsers, + "perm-resources", + "Get or apply permission on resources as a whole.", + ResourcesPermService.add_arguments, + ResourcesPermService.do_command, + ) + add_subcommand( + subparsers, + "perm-resource", + "Get or apply permission on a set of resources.", + ResourcePermService.add_arguments, + ResourcePermService.do_command, + ) + add_subcommand( + subparsers, + "perm-r", + "Get or apply R permission.", + RPermService.add_arguments, + RPermService.do_command, + ) + add_subcommand( + subparsers, + "perm-datashield", + "Get or apply DataSHIELD permission.", + DataSHIELDPermService.add_arguments, + DataSHIELDPermService.do_command, + ) + add_subcommand( + subparsers, + "perm-system", + "Get or apply system permission.", + SystemPermService.add_arguments, + SystemPermService.do_command, + ) + add_subcommand( + subparsers, + "plugin", + "Manage system plugins.", + PluginService.add_arguments, + PluginService.do_command, + ) + add_subcommand( + subparsers, + "encrypt", + "Encrypt string using Opal's secret key.", + EncryptService.add_arguments, + EncryptService.do_command, + ) + add_subcommand( + subparsers, + "decrypt", + "Decrypt string using 
Opal's secret key.", + DecryptService.add_arguments, + DecryptService.do_command, + ) + add_subcommand( + subparsers, + "task", + "Manage a task.", + TaskService.add_arguments, + TaskService.do_command, + ) + add_subcommand( + subparsers, + "system", + "Query for system status and configuration.", + SystemService.add_arguments, + SystemService.do_command, + ) + add_subcommand( + subparsers, + "rest", + "Request directly the Opal REST API, for advanced users.", + RESTService.add_arguments, + RESTService.do_command, + ) + add_subcommand( + subparsers, + "analysis-plugin", + "Analyses a project variables using external R plugins.", + AnalysisCommand.add_arguments, + AnalysisCommand.do_command, + ) + add_subcommand( + subparsers, + "export-analysis-plugin", + "Exports analysis data of a project or specific tables.", + ExportAnalysisService.add_arguments, + ExportAnalysisService.do_command, + ) + add_subcommand( + subparsers, + "sql", + "Execute a SQL statement on project's tables.", + SQLService.add_arguments, + SQLService.do_command, + ) + add_subcommand( + subparsers, + "sql-history", + "SQL execution history of current user or of other users (administrator only).", + SQLHistoryService.add_arguments, + SQLHistoryService.do_command, + ) # Execute selected command args = parser.parse_args() - if hasattr(args, 'func'): + if hasattr(args, "func"): try: - # Prompt for a missing password only when user/password is required - if not (args.ssl_cert or args.ssl_key) and not args.token: - if not args.password or len(args.password) == 0: - args.password = prompt_password() - args.func(args) + # Prompt for a missing password only when user/password is required + if not (args.ssl_cert or args.ssl_key) and not args.token: + if not args.password or len(args.password) == 0: + args.password = prompt_password() + args.func(args) except HTTPError as e: - Formatter.print_json(e.error, args.json if hasattr(args, 'json') else False) + Formatter.print_json(e.error, args.json if hasattr(args, "json") else False) sys.exit(2) except Exception as e: - if hasattr(e, 'message'): + if hasattr(e, "message"): print(e.message) else: print(e) sys.exit(2) else: - print('Opal command line tool.') - print('For more details: opal --help') + print("Opal command line tool.") + print("For more details: opal --help") diff --git a/obiba_opal/data.py b/obiba_opal/data.py index 957af73..ca04d26 100755 --- a/obiba_opal/data.py +++ b/obiba_opal/data.py @@ -6,6 +6,7 @@ import sys import os + class DataService: """ Extract identifiers, value sets and values from a table. @@ -20,14 +21,37 @@ def add_arguments(self, parser): """ Add data command specific options """ - parser.add_argument('name', - help='Fully qualified name of a table or a variable, for instance: opal-data.questionnaire or opal-data.questionnaire:Q1.') - parser.add_argument('--id', '-i', required=False, - help='Entity identifier. 
If missing the list of entities is returned.') - parser.add_argument('--raw', '-r', action='store_true', help='Get raw value, output to stdout, useful for downloading a binary value') - parser.add_argument('--pos', '-po', required=False, - help='Position of the value to query in case of a repeatable variable (starting at 0).') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument( + "name", + help="Fully qualified name of a table or a variable, for instance: " + "opal-data.questionnaire or opal-data.questionnaire:Q1.", + ) + parser.add_argument( + "--id", + "-i", + required=False, + help="Entity identifier. If missing the list of entities is returned.", + ) + parser.add_argument( + "--raw", + "-r", + action="store_true", + help="Get raw value, output to stdout, useful for downloading " + "a binary value", + ) + parser.add_argument( + "--pos", + "-po", + required=False, + help="Position of the value to query in case of a repeatable " + "variable (starting at 0).", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(self, args): @@ -40,7 +64,9 @@ def do_command(self, args): fd = None if args.raw: fd = sys.stdout.fileno() - res = DataService(client, args.verbose)._get_data(args.name, args.id, args.pos, fd) + res = DataService(client, args.verbose)._get_data( + args.name, args.id, args.pos, fd + ) # format response core.Formatter.print_json(res, args.json) finally: @@ -53,7 +79,7 @@ def get_entities(self, project: str, table: str) -> list: :param project: The project name :param table: The table name """ - return self._get_data('%s.%s' % (project, table)) + return self._get_data(f"{project}.{table}") def get_valueset(self, project: str, table: str, id: str) -> dict: """ @@ -63,28 +89,37 @@ def get_valueset(self, project: str, table: str, id: str) -> dict: :param table: The table name :param id: The entity identifier """ - return self._get_data('%s.%s' % (project, table), id) + return self._get_data(f"{project}.{table}", id) - def get_value(self, project: str, table: str, variable: str, id: str, pos: str = None, fd = None) -> dict: + def get_value( + self, project: str, table: str, variable: str, id: str, pos: str = None, fd=None + ) -> dict: """ Get the variable value of an entity in a project's table. :param project: The project name :param table: The table name :param id: The entity identifier - :param pos: Position of the value to query in case of a repeatable variable (starting at 0) - :param fd: Get raw value into the provided file descriptor (see os.fdopen()), useful for downloading a binary value + :param pos: Position of the value to query in case of a repeatable + variable (starting at 0) + :param fd: Get raw value into the provided file descriptor + (see os.fdopen()), useful for downloading a binary value """ - return self._get_data('%s.%s:%s' % (project, table, variable), id, pos, fd) + return self._get_data(f"{project}.{table}:{variable}", id, pos, fd) - def _get_data(self, name: str, id: str = None, pos: str = None, fd = None) -> any: + def _get_data(self, name: str, id: str = None, pos: str = None, fd=None) -> any: """ Execute data command - :param name: Fully qualified name of a table or a variable, for instance: opal-data.questionnaire or opal-data.questionnaire:Q1 - :param id: Entity identifier. 
If missing the list of entities is returned - :param pos: Position of the value to query in case of a repeatable variable (starting at 0) - :param fd: Get raw value into the provided file descriptor (see os.fdopen()), useful for downloading a binary value + :param name: Fully qualified name of a table or a variable, for + instance: opal-data.questionnaire or + opal-data.questionnaire:Q1 + :param id: Entity identifier. If missing the list of entities is + returned + :param pos: Position of the value to query in case of a repeatable + variable (starting at 0) + :param fd: Get raw value into the provided file descriptor + (see os.fdopen()), useful for downloading a binary value """ request = self.client.new_request() if self.verbose: @@ -95,31 +130,38 @@ def _get_data(self, name: str, id: str = None, pos: str = None, fd = None) -> an ws = self._make_ws(resolver, id, pos, raw) request.fail_on_error().get().resource(ws) if raw: - fp = os.fdopen(fd, 'wb') - response = request.accept('*/*').send(fp) + fp = os.fdopen(fd, "wb") + response = request.accept("*/*").send(fp) fp.flush() return None else: response = request.send() return response.from_json() - def _make_ws(self, resolver: core.MagmaNameResolver, id: str = None, pos: str = None, raw: bool = False): + def _make_ws( + self, + resolver: core.MagmaNameResolver, + id: str = None, + pos: str = None, + raw: bool = False, + ): """ Build the web service resource path """ ws = resolver.get_table_ws() if id: - ws = '%s/valueSet/%s' % (ws, id) + ws = f"{ws}/valueSet/{id}" if resolver.is_variable(): - ws = '%s/variable/%s' % (ws, resolver.variable) + ws = f"{ws}/variable/{resolver.variable}" if raw: - ws = '%s/value' % ws + ws = f"{ws}/value" if pos: - ws = ws + '%s?pos=%s' % (ws, pos) + ws = ws + f"?pos={pos}" else: - ws = '%s/entities' % ws + ws = f"{ws}/entities" return ws + class EntityService: """ Get information about entities. @@ -134,11 +176,26 @@ def add_arguments(self, parser): """ Add variable command specific options """ - parser.add_argument('id', help='Identifier of the entity.') - parser.add_argument('--type', '-ty', required=False, help='Type of the entity. Default type is Participant.') - parser.add_argument('--tables', '-ta', action='store_true', - help='Get the list of tables in which the entity with given identifier exists.') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument("id", help="Identifier of the entity.") + parser.add_argument( + "--type", + "-ty", + required=False, + help="Type of the entity. 
Default type is Participant.", + ) + parser.add_argument( + "--tables", + "-ta", + action="store_true", + help="Get the list of tables in which the entity with given " + "identifier exists.", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(self, args): @@ -150,7 +207,9 @@ def do_command(self, args): try: res = None if args.tables: - res = EntityService(client, args.verbose).get_entity_tables(args.id, args.type) + res = EntityService(client, args.verbose).get_entity_tables( + args.id, args.type + ) else: res = EntityService(client, args.verbose).get_entity(args.id, args.type) @@ -164,7 +223,13 @@ def get_entity(self, id: str, type: str = None) -> dict: if self.verbose: request.verbose() # send request - response = request.fail_on_error().accept_json().get().resource(self._make_ws(id, type, False)).send() + response = ( + request.fail_on_error() + .accept_json() + .get() + .resource(self._make_ws(id, type, False)) + .send() + ) return response.from_json() def get_entity_tables(self, id: str, type: str = None) -> list: @@ -172,18 +237,21 @@ def get_entity_tables(self, id: str, type: str = None) -> list: if self.verbose: request.verbose() # send request - response = request.fail_on_error().accept_json().get().resource(self._make_ws(id, type, True)).send() + response = ( + request.fail_on_error() + .accept_json() + .get() + .resource(self._make_ws(id, type, True)) + .send() + ) return response.from_json() def _make_ws(self, id: str, type: str = None, tables: bool = False): """ Build the web service resource path """ - ws = '/entity/%s/type/' % id - if type: - ws = ws + type - else: - ws = ws + 'Participant' + ws = f"/entity/{id}/type/" + ws = ws + type if type else ws + "Participant" if tables: - ws = ws + '/tables' - return ws \ No newline at end of file + ws = ws + "/tables" + return ws diff --git a/obiba_opal/dictionary.py b/obiba_opal/dictionary.py index d052f71..45e65fc 100755 --- a/obiba_opal/dictionary.py +++ b/obiba_opal/dictionary.py @@ -26,10 +26,25 @@ def add_arguments(cls, parser): """ Add variable command specific options """ - parser.add_argument('name', - help='Fully qualified name of a datasource/project or a table or a variable, for instance: opal-data or opal-data.questionnaire or opal-data.questionnaire:Q1. Wild cards can also be used, for instance: "*", "opal-data.*", etc.') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') - parser.add_argument('--excel', '-xls', required=False, help='Full path of the target data dictionary Excel file.') + parser.add_argument( + "name", + help="Fully qualified name of a datasource/project or a table or " + "a variable, for instance: opal-data or opal-data.questionnaire " + "or opal-data.questionnaire:Q1. Wild cards can also be used, " + 'for instance: "*", "opal-data.*", etc.', + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) + parser.add_argument( + "--excel", + "-xls", + required=False, + help="Full path of the target data dictionary Excel file.", + ) @classmethod def do_command(cls, args): @@ -45,7 +60,7 @@ def do_command(cls, args): res = service._get_dictionary_as_excel(args.name) with open(args.excel, mode="wb") as excelFile: excelFile.write(res) - else: + else: res = service._get_dictionary() # format response @@ -57,7 +72,7 @@ def get_datasources(self) -> list: """ Get the list of datasources. 
""" - return self._get_dictionary('*') + return self._get_dictionary("*") def get_datasource(self, project: str) -> dict: """ @@ -73,7 +88,7 @@ def get_tables(self, project: str) -> list: :param project: The project name associated to the datasource """ - return self._get_dictionary('%s.*' % project) + return self._get_dictionary(f"{project}.*") def get_table(self, project: str, table: str) -> dict: """ @@ -82,7 +97,7 @@ def get_table(self, project: str, table: str) -> dict: :param project: The project name associated to the datasource :param table: The table name """ - return self._get_dictionary('%s.%s' % (project, table)) + return self._get_dictionary(f"{project}.{table}") def get_variables(self, project: str, table: str) -> list: """ @@ -91,7 +106,7 @@ def get_variables(self, project: str, table: str) -> list: :param project: The project name associated to the datasource :param table: The table name """ - return self._get_dictionary('%s.%s:*' % (project, table)) + return self._get_dictionary(f"{project}.{table}:*") def get_variable(self, project: str, table: str, variable: str) -> list: """ @@ -101,7 +116,7 @@ def get_variable(self, project: str, table: str, variable: str) -> list: :param table: The table name :param variable: The variable name """ - return self._get_dictionary('%s.%s:%s' % (project, table, variable)) + return self._get_dictionary(f"{project}.{table}:{variable}") def delete_tables(self, project: str, tables: list = None): """ @@ -115,19 +130,25 @@ def delete_tables(self, project: str, tables: list = None): tables_ = tables if not tables: tables_ = self.get_tables(project) - tables_ = [x['name'] for x in tables_] + tables_ = [x["name"] for x in tables_] for table in tables_: request = self.client.new_request() if self.verbose: request.verbose() - request.fail_on_error().delete().resource(core.UriBuilder(['datasource', project, 'table', table]).build()).send() + request.fail_on_error().delete().resource( + core.UriBuilder(["datasource", project, "table", table]).build() + ).send() def _get_dictionary(self, name: str) -> any: """ Get dictionary items by their full name, with wild-card support. - :param name: Fully qualified name of a datasource/project or a table or a variable, for instance: opal-data or opal-data.questionnaire or opal-data.questionnaire:Q1. Wild cards can also be used, for instance: "*", "opal-data.*", etc. + :param name: Fully qualified name of a datasource/project or a table + or a variable, for instance: opal-data or + opal-data.questionnaire or opal-data.questionnaire:Q1. + Wild cards can also be used, for instance: "*", + "opal-data.*", etc. """ request = self.client.new_request() request.fail_on_error().accept_json() @@ -144,7 +165,11 @@ def _get_dictionary_as_excel(self, name: str) -> any: """ Get dictionary items by their full name, with wild-card support. - :param name: Fully qualified name of a datasource/project or a table or a variable, for instance: opal-data or opal-data.questionnaire or opal-data.questionnaire:Q1. Wild cards can also be used, for instance: "*", "opal-data.*", etc. + :param name: Fully qualified name of a datasource/project or a table + or a variable, for instance: opal-data or + opal-data.questionnaire or opal-data.questionnaire:Q1. + Wild cards can also be used, for instance: "*", + "opal-data.*", etc. 
""" request = self.client.new_request() request.fail_on_error().accept("application/vnd.ms-excel") @@ -157,7 +182,10 @@ def _get_dictionary_as_excel(self, name: str) -> any: resolver = core.MagmaNameResolver(name) if not resolver.is_variables(): - raise Exception("Excel data dictionaries must be for all variables, use '.:*' format for resource.") + raise Exception( + "Excel data dictionaries must be for all variables, use " + "'.
:*' format for resource." + ) request.get().resource(f"{resolver.get_ws()}/excel") response = request.send() @@ -179,16 +207,38 @@ def add_arguments(cls, parser): """ Add command specific options """ - parser.add_argument('name', - help='Fully qualified name of a datasource/project or a table or a variable, for instance: opal-data or opal-data.questionnaire or opal-data.questionnaire:Q1. Wild cards can also be used, for instance: "opal-data.*", etc.') - parser.add_argument('--output', '-out', help='CSV/TSV file to output (default is stdout)', - type=argparse.FileType('w'), default=sys.stdout) - parser.add_argument('--locale', '-l', required=False, - help='Exported locale (default is none)') - parser.add_argument('--separator', '-s', required=False, - help='Separator char for CSV/TSV format (default is the tabulation character)') - parser.add_argument('--taxonomies', '-tx', nargs='+', required=False, - help='The list of taxonomy names of interest (default is any that are found in the variable attributes)') + parser.add_argument( + "name", + help="Fully qualified name of a datasource/project or a table or " + "a variable, for instance: opal-data or opal-data.questionnaire " + "or opal-data.questionnaire:Q1. Wild cards can also be used, " + 'for instance: "opal-data.*", etc.', + ) + parser.add_argument( + "--output", + "-out", + help="CSV/TSV file to output (default is stdout)", + type=argparse.FileType("w"), + default=sys.stdout, + ) + parser.add_argument( + "--locale", "-l", required=False, help="Exported locale (default is none)" + ) + parser.add_argument( + "--separator", + "-s", + required=False, + help="Separator char for CSV/TSV format (default is the " + "tabulation character)", + ) + parser.add_argument( + "--taxonomies", + "-tx", + nargs="+", + required=False, + help="The list of taxonomy names of interest (default is any that " + "are found in the variable attributes)", + ) @classmethod def do_command(cls, args): @@ -197,24 +247,65 @@ def do_command(cls, args): """ # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) - sep = args.separator if args.separator else '\t' - ExportAnnotationsService(client, args.verbose)._export_annotations(args.name, args.output, sep = sep, taxonomies = args.taxonomies, locale = args.locale) - - def export_project_annotations(self, project: str, output, sep: str = '\t', taxonomies: list = None, locale: str = None): + sep = args.separator if args.separator else "\t" + ExportAnnotationsService(client, args.verbose)._export_annotations( + args.name, + args.output, + sep=sep, + taxonomies=args.taxonomies, + locale=args.locale, + ) + + def export_project_annotations( + self, + project: str, + output, + sep: str = "\t", + taxonomies: list = None, + locale: str = None, + ): self._export_annotations(project, output, sep, taxonomies, locale) - def export_table_annotations(self, project: str, table: str, output, sep: str = '\t', taxonomies: list = None, locale: str = None): - self._export_annotations('%s.%s' % (project, table), output, sep, taxonomies, locale) - - def export_variable_annotations(self, project: str, table: str, variable: str, output, sep: str = '\t', taxonomies: list = None, locale: str = None): - self._export_annotations('%s.%s:%s' % (project, table, variable), output, sep, taxonomies, locale) - - def _export_annotations(self, name: str, output, sep: str = '\t', taxonomies: list = None, locale: str = None): + def export_table_annotations( + self, + project: str, + table: str, + output, + sep: str = "\t", 
+ taxonomies: list = None, + locale: str = None, + ): + self._export_annotations(f"{project}.{table}", output, sep, taxonomies, locale) + + def export_variable_annotations( + self, + project: str, + table: str, + variable: str, + output, + sep: str = "\t", + taxonomies: list = None, + locale: str = None, + ): + self._export_annotations( + f"{project}.{table}:{variable}", output, sep, taxonomies, locale + ) + + def _export_annotations( + self, + name: str, + output, + sep: str = "\t", + taxonomies: list = None, + locale: str = None, + ): writer = csv.writer(output, delimiter=sep) - writer.writerow(['project', 'table', 'variable', 'namespace', 'name', 'value']) + writer.writerow(["project", "table", "variable", "namespace", "name", "value"]) self._handle_item(writer, name, taxonomies, locale) - def _handle_item(self, writer, name: str, taxonomies: list = None, locale: str = None): + def _handle_item( + self, writer, name: str, taxonomies: list = None, locale: str = None + ): # print 'Handling ' + name request = self.client.new_request() request.fail_on_error().accept_json() @@ -228,7 +319,7 @@ def _handle_item(self, writer, name: str, taxonomies: list = None, locale: str = response = request.send() if resolver.is_datasources(): - raise Exception('Wildcard not allowed for datasources/projects') + raise Exception("Wildcard not allowed for datasources/projects") res = response.from_json() if resolver.is_datasource(): @@ -237,30 +328,71 @@ def _handle_item(self, writer, name: str, taxonomies: list = None, locale: str = self._handle_table(writer, res, taxonomies, locale) if resolver.is_variables(): for variable in res: - self._handle_variable(writer, resolver.datasource, resolver.table, variable, taxonomies, locale) + self._handle_variable( + writer, + resolver.datasource, + resolver.table, + variable, + taxonomies, + locale, + ) if resolver.is_variable(): - self._handle_variable(writer, resolver.datasource, resolver.table, res, taxonomies, locale) - - def _handle_datasource(self, writer, datasourceObject, taxonomies: list = None, locale: str = None): - for table in datasourceObject['table']: - self._handle_item(writer, datasourceObject['name'] + '.' + table + ':*', taxonomies, locale) - - def _handle_table(self, writer, tableObject, taxonomies: list = None, locale: str = None): - self._handle_item(writer, tableObject['datasourceName'] + '.' + tableObject['name'] + ':*', taxonomies, locale) - - def _handle_variable(self, writer, datasource, table, variableObject, taxonomies: list = None, locale: str = None): - if 'attributes' in variableObject: - for attribute in variableObject['attributes']: - do_search = 'namespace' in attribute and 'locale' in attribute \ - and locale in attribute['locale'] \ - if locale \ - else 'namespace' in attribute and 'locale' not in attribute + self._handle_variable( + writer, resolver.datasource, resolver.table, res, taxonomies, locale + ) + + def _handle_datasource( + self, writer, datasourceObject, taxonomies: list = None, locale: str = None + ): + for table in datasourceObject["table"]: + self._handle_item( + writer, + datasourceObject["name"] + "." + table + ":*", + taxonomies, + locale, + ) + + def _handle_table( + self, writer, tableObject, taxonomies: list = None, locale: str = None + ): + self._handle_item( + writer, + tableObject["datasourceName"] + "." 
+ tableObject["name"] + ":*", + taxonomies, + locale, + ) + + def _handle_variable( + self, + writer, + datasource, + table, + variableObject, + taxonomies: list = None, + locale: str = None, + ): + if "attributes" in variableObject: + for attribute in variableObject["attributes"]: + do_search = ( + "namespace" in attribute + and "locale" in attribute + and locale in attribute["locale"] + if locale + else "namespace" in attribute and "locale" not in attribute + ) if do_search: - if not taxonomies or attribute['namespace'] in taxonomies: - row = [datasource, table, variableObject['name'], attribute['namespace'], attribute['name'], - attribute['value']] + if not taxonomies or attribute["namespace"] in taxonomies: + row = [ + datasource, + table, + variableObject["name"], + attribute["namespace"], + attribute["name"], + attribute["value"], + ] writer.writerow(row) + class ImportAnnotationsService: """ Import dictionary annotations from previous export. @@ -275,19 +407,50 @@ def add_arguments(cls, parser): """ Add command specific options """ - parser.add_argument('--input', '-in', - help='CSV/TSV input file, typically the output of the "export-annot" command (default is stdin)', - type=argparse.FileType('r'), default=sys.stdin) - parser.add_argument('--locale', '-l', required=False, - help='Destination annotation locale (default is none)') - parser.add_argument('--separator', '-s', required=False, - help='Separator char for CSV/TSV format (default is the tabulation character)') - parser.add_argument('--destination', '-d', required=False, - help='Destination datasource name (default is the one(s) specified in the input file)') - parser.add_argument('--tables', '-t', nargs='+', required=False, - help='The list of tables which variables are to be annotated (defaults to all that are found in the input file)') - parser.add_argument('--taxonomies', '-tx', nargs='+', required=False, - help='The list of taxonomy names of interest (default is any that is found in the input file)') + parser.add_argument( + "--input", + "-in", + help="CSV/TSV input file, typically the output of the " + '"export-annot" command (default is stdin)', + type=argparse.FileType("r"), + default=sys.stdin, + ) + parser.add_argument( + "--locale", + "-l", + required=False, + help="Destination annotation locale (default is none)", + ) + parser.add_argument( + "--separator", + "-s", + required=False, + help="Separator char for CSV/TSV format (default is the " + "tabulation character)", + ) + parser.add_argument( + "--destination", + "-d", + required=False, + help="Destination datasource name (default is the one(s) " + "specified in the input file)", + ) + parser.add_argument( + "--tables", + "-t", + nargs="+", + required=False, + help="The list of tables which variables are to be annotated " + "(defaults to all that are found in the input file)", + ) + parser.add_argument( + "--taxonomies", + "-tx", + nargs="+", + required=False, + help="The list of taxonomy names of interest (default is any that " + "is found in the input file)", + ) @classmethod def do_command(cls, args): @@ -297,10 +460,25 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) service = ImportAnnotationsService(client, args.verbose) - sep = args.separator if args.separator else '\t' - service.import_annotations(args.input, sep=sep, tables=args.tables, taxonomies=args.taxonomies, destination=args.destination, locale=args.locale) - - def import_annotations(self, input, sep: str = '\t', tables: 
list = None, taxonomies: list = None, destination: str = None, locale: str = None): + sep = args.separator if args.separator else "\t" + service.import_annotations( + args.input, + sep=sep, + tables=args.tables, + taxonomies=args.taxonomies, + destination=args.destination, + locale=args.locale, + ) + + def import_annotations( + self, + input, + sep: str = "\t", + tables: list = None, + taxonomies: list = None, + destination: str = None, + locale: str = None, + ): reader = csv.reader(input, delimiter=sep) next(reader) # skip header value_map = {} @@ -316,23 +494,34 @@ def import_annotations(self, input, sep: str = '\t', tables: list = None, taxono for name in value_map[datasource][table][namespace]: for value in value_map[datasource][table][namespace][name]: ds = destination if destination else datasource - variables = value_map[datasource][table][namespace][name][value] - self._annotate(ds, table, namespace, name, value, variables, locale) - - def _annotate(self, datasource, table, namespace, name, value, variables, locale: str = None): + variables = value_map[datasource][table][namespace][ + name + ][value] + self._annotate( + ds, table, namespace, name, value, variables, locale + ) + + def _annotate( + self, datasource, table, namespace, name, value, variables, locale: str = None + ): request = self.client.new_request() request.fail_on_error().accept_json() - params = {'namespace': namespace, 'name': name, 'value': value} + params = {"namespace": namespace, "name": name, "value": value} if locale: - params['locale'] = locale + params["locale"] = locale - builder = core.UriBuilder(['datasource', datasource, 'table', table, 'variables', '_attribute'], params=params) - form = '&'.join([urllib.parse.urlencode({'variable': x}) for x in variables]) + builder = core.UriBuilder( + ["datasource", datasource, "table", table, "variables", "_attribute"], + params=params, + ) + form = "&".join([urllib.parse.urlencode({"variable": x}) for x in variables]) if self.verbose: request.verbose() - request.put().resource(builder.build()).content_type_form_urlencoded().content(form).send() + request.put().resource(builder.build()).content_type_form_urlencoded().content( + form + ).send() def _append_row(self, dictionary, row, tables=None, taxonomies=None): if row[0] not in dictionary: diff --git a/obiba_opal/exports.py b/obiba_opal/exports.py index 1863c2e..39f4b88 100644 --- a/obiba_opal/exports.py +++ b/obiba_opal/exports.py @@ -17,12 +17,32 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--datasource', '-d', required=True, help='Project name') - parser.add_argument('--tables', '-t', nargs='+', required=True, help='The list of tables to be exported') - parser.add_argument('--name', '-n', required=True, help='Opal datasource plugin name') - parser.add_argument('--config', '-c', required=True, help='A JSON file containing the export configuration') - parser.add_argument('--identifiers', '-id', required=False, help='Name of the ID mapping') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument("--datasource", "-d", required=True, help="Project name") + parser.add_argument( + "--tables", + "-t", + nargs="+", + required=True, + help="The list of tables to be exported", + ) + parser.add_argument( + "--name", "-n", required=True, help="Opal datasource plugin name" + ) + parser.add_argument( + "--config", + "-c", + required=True, + help="A JSON file containing the export configuration", + ) 
+ parser.add_argument( + "--identifiers", "-id", required=False, help="Name of the ID mapping" + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -31,16 +51,25 @@ def do_command(cls, args): """ # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) - config = json.loads(open(args.config).read()) + with open(args.config) as f: + config = json.loads(f.read()) try: - res = cls(client, args.verbose) \ - .export_data(args.name, args.datasource, args.tables, config, args.identifiers) + res = cls(client, args.verbose).export_data( + args.name, args.datasource, args.tables, config, args.identifiers + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def export_data(self, name: str, project: str, tables: list, config: str, identifiers: str = None) -> dict: + def export_data( + self, + name: str, + project: str, + tables: list, + config: str, + identifiers: str = None, + ) -> dict: """ Export tables using a plugin. @@ -51,12 +80,18 @@ def export_data(self, name: str, project: str, tables: list, config: str, identi :param identifiers: The name of the ID mapping """ configStr = json.dumps(config) - exporter = io.OpalExporter.build(client=self.client, datasource=project , tables=tables, - identifiers=identifiers, output=configStr, - verbose=self.verbose) + exporter = io.OpalExporter.build( + client=self.client, + datasource=project, + tables=tables, + identifiers=identifiers, + output=configStr, + verbose=self.verbose, + ) response = exporter.submit(name) return response.from_json() + class ExportCSVCommand: """ Export some tables in CSV format. @@ -71,14 +106,35 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--datasource', '-d', required=True, help='Project name') - parser.add_argument('--tables', '-t', nargs='+', required=True, help='The list of tables to be exported') - parser.add_argument('--output', '-out', required=True, help='Output directory name') - parser.add_argument('--id-name', '-in', required=False, help='Name of the ID column name') - parser.add_argument('--identifiers', '-id', required=False, help='Name of the ID mapping') - parser.add_argument('--no-multilines', '-nl', action='store_true', - help='Do not write value sequences as multiple lines') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument("--datasource", "-d", required=True, help="Project name") + parser.add_argument( + "--tables", + "-t", + nargs="+", + required=True, + help="The list of tables to be exported", + ) + parser.add_argument( + "--output", "-out", required=True, help="Output directory name" + ) + parser.add_argument( + "--id-name", "-in", required=False, help="Name of the ID column name" + ) + parser.add_argument( + "--identifiers", "-id", required=False, help="Name of the ID mapping" + ) + parser.add_argument( + "--no-multilines", + "-nl", + action="store_true", + help="Do not write value sequences as multiple lines", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -88,14 +144,28 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - res = cls(client, args.verbose) \ - .export_data(args.datasource, 
args.tables, args.output, args.id_name, args.identifiers, not args.no_multilines) + res = cls(client, args.verbose).export_data( + args.datasource, + args.tables, + args.output, + args.id_name, + args.identifiers, + not args.no_multilines, + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def export_data(self, project: str, tables: list, output: str, id_name: str = None, identifiers: str = None, multilines: bool = True) -> dict: + def export_data( + self, + project: str, + tables: list, + output: str, + id_name: str = None, + identifiers: str = None, + multilines: bool = True, + ) -> dict: """ Export tables in CSV files. @@ -106,12 +176,20 @@ def export_data(self, project: str, tables: list, output: str, id_name: str = No :param identifiers: The name of the ID mapping :param multilines: Write value sequences as multiple lines """ - exporter = io.OpalExporter.build(client=self.client, datasource=project , tables=tables, entityIdNames = id_name, - identifiers=identifiers, output=output, - multilines=multilines, verbose=self.verbose) - response = exporter.submit('csv') + exporter = io.OpalExporter.build( + client=self.client, + datasource=project, + tables=tables, + entityIdNames=id_name, + identifiers=identifiers, + output=output, + multilines=multilines, + verbose=self.verbose, + ) + response = exporter.submit("csv") return response.from_json() + class ExportRDSCommand: """ Data export in RDS (using R). @@ -126,14 +204,35 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--datasource', '-d', required=True, help='Project name') - parser.add_argument('--tables', '-t', nargs='+', required=True, help='The list of tables to be exported') - parser.add_argument('--output', '-out', required=True, help='Output file name (.rds)') - parser.add_argument('--id-name', '-in', required=False, help='Name of the ID column name') - parser.add_argument('--identifiers', '-id', required=False, help='Name of the ID mapping') - parser.add_argument('--no-multilines', '-nl', action='store_true', - help='Do not write value sequences as multiple lines') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument("--datasource", "-d", required=True, help="Project name") + parser.add_argument( + "--tables", + "-t", + nargs="+", + required=True, + help="The list of tables to be exported", + ) + parser.add_argument( + "--output", "-out", required=True, help="Output file name (.rds)" + ) + parser.add_argument( + "--id-name", "-in", required=False, help="Name of the ID column name" + ) + parser.add_argument( + "--identifiers", "-id", required=False, help="Name of the ID mapping" + ) + parser.add_argument( + "--no-multilines", + "-nl", + action="store_true", + help="Do not write value sequences as multiple lines", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -143,14 +242,28 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - res = cls(client, args.verbose) \ - .export_data(args.datasource, args.tables, args.output, args.id_name, args.identifiers, not args.no_multilines) + res = cls(client, args.verbose).export_data( + args.datasource, + args.tables, + args.output, + args.id_name, + args.identifiers, + not args.no_multilines, + ) # format response 
core.Formatter.print_json(res, args.json) finally: client.close() - def export_data(self, project: str, tables: list, output: str, id_name: str = None, identifiers: str = None, multilines: bool = True) -> dict: + def export_data( + self, + project: str, + tables: list, + output: str, + id_name: str = None, + identifiers: str = None, + multilines: bool = True, + ) -> dict: """ Export tables in a RDS file. @@ -161,15 +274,23 @@ def export_data(self, project: str, tables: list, output: str, id_name: str = No :param identifiers: The name of the ID mapping :param multilines: Write value sequences as multiple lines """ - if not (output.endswith('.rds')): - raise Exception('Output must be a RDS file (.rds).') - - exporter = io.OpalExporter.build(client=self.client, datasource=project , tables=tables, entityIdNames = id_name, - identifiers=identifiers, output=output, - multilines=multilines, verbose=self.verbose) - response = exporter.submit('RDS') + if not (output.endswith(".rds")): + raise Exception("Output must be a RDS file (.rds).") + + exporter = io.OpalExporter.build( + client=self.client, + datasource=project, + tables=tables, + entityIdNames=id_name, + identifiers=identifiers, + output=output, + multilines=multilines, + verbose=self.verbose, + ) + response = exporter.submit("RDS") return response.from_json() + class ExportRSASCommand: """ Data export in SAS (using R). @@ -184,15 +305,38 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--datasource', '-d', required=True, help='Project name') - parser.add_argument('--tables', '-t', nargs='+', required=True, help='The list of tables to be exported') - parser.add_argument('--output', '-out', required=True, - help='Output file name (.sas7bdat or .xpt (Transport format))') - parser.add_argument('--id-name', '-in', required=False, help='Name of the ID column name') - parser.add_argument('--identifiers', '-id', required=False, help='Name of the ID mapping') - parser.add_argument('--no-multilines', '-nl', action='store_true', - help='Do not write value sequences as multiple lines') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument("--datasource", "-d", required=True, help="Project name") + parser.add_argument( + "--tables", + "-t", + nargs="+", + required=True, + help="The list of tables to be exported", + ) + parser.add_argument( + "--output", + "-out", + required=True, + help="Output file name (.sas7bdat or .xpt (Transport format))", + ) + parser.add_argument( + "--id-name", "-in", required=False, help="Name of the ID column name" + ) + parser.add_argument( + "--identifiers", "-id", required=False, help="Name of the ID mapping" + ) + parser.add_argument( + "--no-multilines", + "-nl", + action="store_true", + help="Do not write value sequences as multiple lines", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -202,14 +346,28 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - res = cls(client, args.verbose) \ - .export_data(args.datasource, args.tables, args.output, args.id_name, args.identifiers, not args.no_multilines) + res = cls(client, args.verbose).export_data( + args.datasource, + args.tables, + args.output, + args.id_name, + args.identifiers, + not args.no_multilines, + ) # format response 
core.Formatter.print_json(res, args.json) finally: client.close() - def export_data(self, project: str, tables: list, output: str, id_name: str = None, identifiers: str = None, multilines: bool = True) -> dict: + def export_data( + self, + project: str, + tables: list, + output: str, + id_name: str = None, + identifiers: str = None, + multilines: bool = True, + ) -> dict: """ Export tables in a SAS file. @@ -220,17 +378,24 @@ def export_data(self, project: str, tables: list, output: str, id_name: str = No :param identifiers: The name of the ID mapping :param multilines: Write value sequences as multiple lines """ - if not (output.endswith('.sas7bdat')) and not (output.endswith('.xpt')): - raise Exception('Output must be a SAS file (.sas7bdat or .xpt).') - - exporter = io.OpalExporter.build(client=self.client, datasource=project , tables=tables, entityIdNames = id_name, - identifiers=identifiers, output=output, - multilines=multilines, verbose=self.verbose) + if not (output.endswith(".sas7bdat")) and not (output.endswith(".xpt")): + raise Exception("Output must be a SAS file (.sas7bdat or .xpt).") + + exporter = io.OpalExporter.build( + client=self.client, + datasource=project, + tables=tables, + entityIdNames=id_name, + identifiers=identifiers, + output=output, + multilines=multilines, + verbose=self.verbose, + ) response = None - if output.endswith('.sas7bdat'): - response = exporter.submit('RSAS') + if output.endswith(".sas7bdat"): + response = exporter.submit("RSAS") else: - response = exporter.submit('RXPT') + response = exporter.submit("RXPT") return response.from_json() @@ -248,14 +413,38 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--datasource', '-d', required=True, help='Project name') - parser.add_argument('--tables', '-t', nargs='+', required=True, help='The list of tables to be exported') - parser.add_argument('--output', '-out', required=True, help='Output file name (.sav or .zsav (compressed format))') - parser.add_argument('--id-name', '-in', required=False, help='Name of the ID column name') - parser.add_argument('--identifiers', '-id', required=False, help='Name of the ID mapping') - parser.add_argument('--no-multilines', '-nl', action='store_true', - help='Do not write value sequences as multiple lines') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument("--datasource", "-d", required=True, help="Project name") + parser.add_argument( + "--tables", + "-t", + nargs="+", + required=True, + help="The list of tables to be exported", + ) + parser.add_argument( + "--output", + "-out", + required=True, + help="Output file name (.sav or .zsav (compressed format))", + ) + parser.add_argument( + "--id-name", "-in", required=False, help="Name of the ID column name" + ) + parser.add_argument( + "--identifiers", "-id", required=False, help="Name of the ID mapping" + ) + parser.add_argument( + "--no-multilines", + "-nl", + action="store_true", + help="Do not write value sequences as multiple lines", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -265,14 +454,28 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - res = cls(client, args.verbose) \ - .export_data(args.datasource, args.tables, args.output, args.id_name, args.identifiers, not args.no_multilines) + 
res = cls(client, args.verbose).export_data( + args.datasource, + args.tables, + args.output, + args.id_name, + args.identifiers, + not args.no_multilines, + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def export_data(self, project: str, tables: list, output: str, id_name: str = None, identifiers: str = None, multilines: bool = True) -> dict: + def export_data( + self, + project: str, + tables: list, + output: str, + id_name: str = None, + identifiers: str = None, + multilines: bool = True, + ) -> dict: """ Export tables in a SPSS file. @@ -283,17 +486,24 @@ def export_data(self, project: str, tables: list, output: str, id_name: str = No :param identifiers: The name of the ID mapping :param multilines: Write value sequences as multiple lines """ - if not (output.endswith('.sav')) and not (output.endswith('.zsav')): - raise Exception('Output must be a SPSS file (.sav or .zsav).') - - exporter = io.OpalExporter.build(client=self.client, datasource=project , tables=tables, entityIdNames = id_name, - identifiers=identifiers, output=output, - multilines=multilines, verbose=self.verbose) + if not (output.endswith(".sav")) and not (output.endswith(".zsav")): + raise Exception("Output must be a SPSS file (.sav or .zsav).") + + exporter = io.OpalExporter.build( + client=self.client, + datasource=project, + tables=tables, + entityIdNames=id_name, + identifiers=identifiers, + output=output, + multilines=multilines, + verbose=self.verbose, + ) response = None - if output.endswith('.sav'): - response = exporter.submit('RSPSS') + if output.endswith(".sav"): + response = exporter.submit("RSPSS") else: - response = exporter.submit('RZSPSS') + response = exporter.submit("RZSPSS") return response.from_json() @@ -311,14 +521,35 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--datasource', '-d', required=True, help='Project name') - parser.add_argument('--tables', '-t', nargs='+', required=True, help='The list of tables to be exported') - parser.add_argument('--output', '-out', required=True, help='Output file name (.dta)') - parser.add_argument('--id-name', '-in', required=False, help='Name of the ID column name') - parser.add_argument('--identifiers', '-id', required=False, help='Name of the ID mapping') - parser.add_argument('--no-multilines', '-nl', action='store_true', - help='Do not write value sequences as multiple lines') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument("--datasource", "-d", required=True, help="Project name") + parser.add_argument( + "--tables", + "-t", + nargs="+", + required=True, + help="The list of tables to be exported", + ) + parser.add_argument( + "--output", "-out", required=True, help="Output file name (.dta)" + ) + parser.add_argument( + "--id-name", "-in", required=False, help="Name of the ID column name" + ) + parser.add_argument( + "--identifiers", "-id", required=False, help="Name of the ID mapping" + ) + parser.add_argument( + "--no-multilines", + "-nl", + action="store_true", + help="Do not write value sequences as multiple lines", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -328,14 +559,28 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - res = cls(client, args.verbose) \ - 
.export_data(args.datasource, args.tables, args.output, args.id_name, args.identifiers, not args.no_multilines) + res = cls(client, args.verbose).export_data( + args.datasource, + args.tables, + args.output, + args.id_name, + args.identifiers, + not args.no_multilines, + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def export_data(self, project: str, tables: list, output: str, id_name: str = None, identifiers: str = None, multilines: bool = True) -> dict: + def export_data( + self, + project: str, + tables: list, + output: str, + id_name: str = None, + identifiers: str = None, + multilines: bool = True, + ) -> dict: """ Export tables in a STATA file. @@ -346,13 +591,20 @@ def export_data(self, project: str, tables: list, output: str, id_name: str = No :param identifiers: The name of the ID mapping :param multilines: Write value sequences as multiple lines """ - if not (output.endswith('.dta')): - raise Exception('Output must be a Stata file (.dta).') - - exporter = io.OpalExporter.build(client=self.client, datasource=project , tables=tables, entityIdNames = id_name, - identifiers=identifiers, output=output, - multilines=multilines, verbose=self.verbose) - response = exporter.submit('RSTATA') + if not (output.endswith(".dta")): + raise Exception("Output must be a Stata file (.dta).") + + exporter = io.OpalExporter.build( + client=self.client, + datasource=project, + tables=tables, + entityIdNames=id_name, + identifiers=identifiers, + output=output, + multilines=multilines, + verbose=self.verbose, + ) + response = exporter.submit("RSTATA") return response.from_json() @@ -370,11 +622,26 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--datasource', '-d', required=True, help='Project name') - parser.add_argument('--tables', '-t', nargs='+', required=True, help='The list of tables to be exported') - parser.add_argument('--database', '-db', required=True, help='Name of the SQL database') - parser.add_argument('--identifiers', '-id', required=False, help='Name of the ID mapping') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument("--datasource", "-d", required=True, help="Project name") + parser.add_argument( + "--tables", + "-t", + nargs="+", + required=True, + help="The list of tables to be exported", + ) + parser.add_argument( + "--database", "-db", required=True, help="Name of the SQL database" + ) + parser.add_argument( + "--identifiers", "-id", required=False, help="Name of the ID mapping" + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -384,28 +651,38 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - res = cls(client, args.verbose) \ - .export_data(args.datasource, args.tables, args.database, args.identifiers) + res = cls(client, args.verbose).export_data( + args.datasource, args.tables, args.database, args.identifiers + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def export_data(self, project: str, tables: list, database: str, identifiers: str = None): + def export_data( + self, project: str, tables: list, database: str, identifiers: str = None + ): """ Export tables in a SQL database. 
:param project: The project name :param tables: The table names to export - :param database: The SQL database name. See ProjectService.get_databases() for a list of databases with 'export' usage. + :param database: The SQL database name. See ProjectService.get_databases() + for a list of databases with 'export' usage. :param identifiers: The name of the ID mapping """ - exporter = io.OpalExporter.build(client=self.client, datasource=project , tables=tables, - identifiers=identifiers, output=database, - verbose=self.verbose) - response = exporter.submit('jdbc') + exporter = io.OpalExporter.build( + client=self.client, + datasource=project, + tables=tables, + identifiers=identifiers, + output=database, + verbose=self.verbose, + ) + response = exporter.submit("jdbc") return response.from_json() + class ExportXMLCommand: """ Data export in XML. @@ -420,11 +697,29 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--datasource', '-d', required=True, help='Project name') - parser.add_argument('--tables', '-t', nargs='+', required=True, help='The list of tables to be exported') - parser.add_argument('--output', '-out', required=True, help='Output zip file name that will be exported') - parser.add_argument('--identifiers', '-id', required=False, help='Name of the ID mapping') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument("--datasource", "-d", required=True, help="Project name") + parser.add_argument( + "--tables", + "-t", + nargs="+", + required=True, + help="The list of tables to be exported", + ) + parser.add_argument( + "--output", + "-out", + required=True, + help="Output zip file name that will be exported", + ) + parser.add_argument( + "--identifiers", "-id", required=False, help="Name of the ID mapping" + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -434,14 +729,17 @@ def do_command(cls, args): # Check output filename extension client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - res = cls(client, args.verbose) \ - .export_data(args.datasource, args.tables, args.output, args.identifiers) + res = cls(client, args.verbose).export_data( + args.datasource, args.tables, args.output, args.identifiers + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def export_data(self, project: str, tables: list, output: str, identifiers: str = None) -> dict: + def export_data( + self, project: str, tables: list, output: str, identifiers: str = None + ) -> dict: """ Export tables in an Opal archive file. 
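The reformatted ExportXMLCommand.export_data() signature above can be exercised as in this minimal sketch (illustrative only: the server URL, credentials, project, table names and output path are placeholders, not values taken from this patch; the (client, verbose) constructor is assumed from the do_command() call shown above):

# Minimal sketch: submit an XML (zip) export task and print the returned task status.
from obiba_opal import OpalClient, ExportXMLCommand

client = OpalClient.buildWithAuthentication(
    server="https://opal.example.org", user="administrator", password="password"
)
try:
    # Output must be a .zip path on the Opal file system.
    status = ExportXMLCommand(client, False).export_data(
        "myproject", ["table1", "table2"], "/home/administrator/export.zip"
    )
    print(status)
finally:
    client.close()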
@@ -452,14 +750,19 @@ def export_data(self, project: str, tables: list, output: str, identifiers: str :param identifiers: The name of the ID mapping :param multilines: Write value sequences as multiple lines """ - if not (output.endswith('.zip')): - raise Exception('Output must be a zip file.') - - exporter = io.OpalExporter.build(client=self.client, datasource=project , tables=tables, - identifiers=identifiers, output=output, - incremental=False, - verbose=self.verbose) - response = exporter.submit('xml') + if not (output.endswith(".zip")): + raise Exception("Output must be a zip file.") + + exporter = io.OpalExporter.build( + client=self.client, + datasource=project, + tables=tables, + identifiers=identifiers, + output=output, + incremental=False, + verbose=self.verbose, + ) + response = exporter.submit("xml") return response.from_json() @@ -477,14 +780,36 @@ def add_arguments(cls, parser): """ Add command specific options """ - parser.add_argument('--project', '-pr', required=True, - help='Project name from which genotypes data will be exported') - parser.add_argument('--vcf', '-vcf', nargs='+', required=True, help='List of VCF/BCF file names') - parser.add_argument('--destination', '-d', required=True, help='Destination folder (in Opal file system)') - parser.add_argument('--filter-table', '-f', required=False, - help='Participant table name to be used to filter the samples by participant ID (only relevant if there is a sample-participant mapping defined)') - parser.add_argument('--no-case-controls', '-nocc', action='store_true', - help='Do not include case control samples (only relevant if there is a sample-participant mapping defined)') + parser.add_argument( + "--project", + "-pr", + required=True, + help="Project name from which genotypes data will be exported", + ) + parser.add_argument( + "--vcf", "-vcf", nargs="+", required=True, help="List of VCF/BCF file names" + ) + parser.add_argument( + "--destination", + "-d", + required=True, + help="Destination folder (in Opal file system)", + ) + parser.add_argument( + "--filter-table", + "-f", + required=False, + help="Participant table name to be used to filter the samples by " + "participant ID (only relevant if there is a sample-participant " + "mapping defined)", + ) + parser.add_argument( + "--no-case-controls", + "-nocc", + action="store_true", + help="Do not include case control samples (only relevant if there " + "is a sample-participant mapping defined)", + ) @classmethod def do_command(cls, args): @@ -494,20 +819,35 @@ def do_command(cls, args): # Build and send requests client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - res = ExportVCFCommand(client, args.verbose) \ - .export_data(args.project, args.vcf, args.destination, not args.no_case_controls, args.filter_table) + res = ExportVCFCommand(client, args.verbose).export_data( + args.project, + args.vcf, + args.destination, + not args.no_case_controls, + args.filter_table, + ) finally: client.close() - def export_data(self, project: str, vcf: list, destination: str, case_controls: bool = True, filter_table: str = None) -> dict: + def export_data( + self, + project: str, + vcf: list, + destination: str, + case_controls: bool = True, + filter_table: str = None, + ) -> dict: """ Export VCF/BCF files. 
:param project: The project name :param vcf: The list of VCF/BCF file names :param destination: The output folder path - :param case_controls: Include case control samples (only relevant if there is a sample-participant mapping defined) - :param filter_table: Participant table name to be used to filter the samples by participant ID (only relevant if there is a sample-participant mapping defined) + :param case_controls: Include case control samples (only relevant if + there is a sample-participant mapping defined) + :param filter_table: Participant table name to be used to filter the + samples by participant ID (only relevant if there + is a sample-participant mapping defined) """ request = self.client.new_request() request.fail_on_error().accept_json().content_type_json() @@ -515,15 +855,15 @@ def export_data(self, project: str, vcf: list, destination: str, case_controls: request.verbose() options = { - 'project': project, - 'names': vcf, - 'destination': destination, - 'caseControl': case_controls + "project": project, + "names": vcf, + "destination": destination, + "caseControl": case_controls, } if filter_table: - options['table'] = filter_table + options["table"] = filter_table # send request - uri = core.UriBuilder(['project', project, 'commands', '_export_vcf']).build() + uri = core.UriBuilder(["project", project, "commands", "_export_vcf"]).build() response = request.resource(uri).post().content(json.dumps(options)).send() - return response.from_json() \ No newline at end of file + return response.from_json() diff --git a/obiba_opal/file.py b/obiba_opal/file.py index 1730f8d..9cdceac 100755 --- a/obiba_opal/file.py +++ b/obiba_opal/file.py @@ -21,13 +21,37 @@ def add_arguments(self, parser): """ Add file command specific options """ - parser.add_argument('path', help='File path in Opal file system.') - parser.add_argument('--download', '-dl', action='store_true', help='Download file, or folder (as a zip file).') - parser.add_argument('--download-password', '-dlp', help='Password to encrypt the file content.') - parser.add_argument('--upload', '-up', required=False, help='Upload a local file to a folder in Opal file system.') - parser.add_argument('--delete', '-dt', action='store_true', help='Delete a file on Opal file system.') - parser.add_argument('--force', '-f', action='store_true', help='Skip confirmation.') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument("path", help="File path in Opal file system.") + parser.add_argument( + "--download", + "-dl", + action="store_true", + help="Download file, or folder (as a zip file).", + ) + parser.add_argument( + "--download-password", "-dlp", help="Password to encrypt the file content." + ) + parser.add_argument( + "--upload", + "-up", + required=False, + help="Upload a local file to a folder in Opal file system.", + ) + parser.add_argument( + "--delete", + "-dt", + action="store_true", + help="Delete a file on Opal file system.", + ) + parser.add_argument( + "--force", "-f", action="store_true", help="Skip confirmation." 
+ ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(self, args): @@ -41,7 +65,9 @@ def do_command(self, args): # send request if args.download or args.download_password: - service.download_file(args.path, sys.stdout.fileno(), args.download_password) + service.download_file( + args.path, sys.stdout.fileno(), args.download_password + ) else: if args.upload: service.upload_file(args.upload, args.path) @@ -50,11 +76,13 @@ def do_command(self, args): if args.force: service.delete_file(args.path) else: - confirmed = input('Delete the file "' + args.path + '"? [y/N]: ') - if confirmed == 'y': + confirmed = input( + 'Delete the file "' + args.path + '"? [y/N]: ' + ) + if confirmed == "y": service.delete_file(args.path) else: - print('Aborted.') + print("Aborted.") sys.exit(0) else: res = service.file_info(args.path) @@ -68,7 +96,8 @@ def download_file(self, path: str, fd, download_password: str = None): :param path: The file path in Opal :param fd: The destination file descriptor (see os.fdopen()) - :param download_password: The password to use to encrypt the downloaded zip archive + :param download_password: The password to use to encrypt the + downloaded zip archive """ request = self.client.new_request() request.fail_on_error() @@ -78,12 +107,12 @@ def download_file(self, path: str, fd, download_password: str = None): file = FileService.OpalFile(path) - - fp = os.fdopen(fd, 'wb') - request.get().resource(file.get_ws()).accept('*/*').header('X-File-Key', download_password).send(fp) + fp = os.fdopen(fd, "wb") + request.get().resource(file.get_ws()).accept("*/*").header( + "X-File-Key", download_password + ).send(fp) fp.flush() - def upload_file(self, upload: str, path: str): """ Upload a file to Opal. 
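A minimal sketch of the download_file() and upload_file() calls reformatted in this hunk and the next (illustrative only: the Opal paths and local file names are placeholders, and FileService is assumed to take (client, verbose) like the other services in this patch):

# Minimal sketch: copy a file out of the Opal file system, then upload a local file back.
import os
from obiba_opal import OpalClient, FileService

client = OpalClient.buildWithAuthentication(
    server="https://opal.example.org", user="administrator", password="password"
)
try:
    service = FileService(client, False)
    # download_file() expects a raw file descriptor; it wraps it with os.fdopen(fd, "wb").
    fd = os.open("local-copy.zip", os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
    service.download_file("/home/administrator/backup.zip", fd)
    # upload_file(local_path, opal_folder) posts the local file to a folder in Opal.
    service.upload_file("data.csv", "/home/administrator")
finally:
    client.close()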
@@ -99,7 +128,7 @@ def upload_file(self, upload: str, path: str): file = FileService.OpalFile(path) - request.content_upload(upload).accept('text/html') + request.content_upload(upload).accept("text/html") request.post().resource(file.get_ws()).send() def delete_file(self, path: str): @@ -144,7 +173,7 @@ def __init__(self, path): self.path = path def get_meta_ws(self): - return '/files/_meta%s' % self.path + return f"/files/_meta{self.path}" def get_ws(self): - return '/files%s' % self.path + return f"/files{self.path}" From 772990b6db9d0936d0c04094c8d370f41f5ed159 Mon Sep 17 00:00:00 2001 From: ymarcon Date: Mon, 9 Feb 2026 17:18:50 +0100 Subject: [PATCH 2/9] Continue fixing E501 line length issues in imports.py --- .github/workflows/ci.yml | 3 + Makefile | 9 + obiba_opal/__init__.py | 136 ++++- obiba_opal/analysis.py | 122 +++-- obiba_opal/core.py | 139 +++--- obiba_opal/imports.py | 1011 ++++++++++++++++++++++++++++---------- obiba_opal/io.py | 341 ++++++++----- obiba_opal/perm.py | 611 +++++++++++++++++------ obiba_opal/project.py | 251 +++++++--- obiba_opal/security.py | 16 +- obiba_opal/sql.py | 96 +++- obiba_opal/subjects.py | 218 +++++--- obiba_opal/system.py | 451 ++++++++++++----- obiba_opal/table.py | 294 +++++++---- pyproject.toml | 3 + tests/test_core.py | 38 +- tests/test_data.py | 36 +- tests/test_dictionary.py | 56 ++- tests/test_exports.py | 16 +- tests/test_file.py | 122 ++--- tests/test_imports.py | 28 +- tests/test_perm.py | 23 +- tests/test_project.py | 32 +- tests/test_subjects.py | 29 +- tests/test_taxonomy.py | 137 +++--- tests/utils.py | 10 +- uv.lock | 31 +- 27 files changed, 2971 insertions(+), 1288 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index dd54939..1300114 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,5 +28,8 @@ jobs: - name: Install dependencies run: uv sync --all-extras + - name: Run linting + run: uv run ruff check . + - name: Run tests run: uv run pytest diff --git a/Makefile b/Makefile index e7e59d0..b34fcba 100644 --- a/Makefile +++ b/Makefile @@ -4,6 +4,15 @@ install: test: uv run --all-extras pytest +lint: + uv run ruff check . + +fix: + uv run ruff check . --fix + +format: + uv run ruff format . 
+ build: uv build diff --git a/obiba_opal/__init__.py b/obiba_opal/__init__.py index fc40231..0473366 100644 --- a/obiba_opal/__init__.py +++ b/obiba_opal/__init__.py @@ -1,14 +1,136 @@ -from obiba_opal.core import UriBuilder, OpalClient, OpalRequest, OpalResponse, Formatter, MagmaNameResolver, HTTPError -from obiba_opal.project import ProjectService, BackupProjectCommand, RestoreProjectCommand +from obiba_opal.core import ( + UriBuilder, + OpalClient, + OpalRequest, + OpalResponse, + Formatter, + MagmaNameResolver, + HTTPError, +) +from obiba_opal.project import ( + ProjectService, + BackupProjectCommand, + RestoreProjectCommand, +) from obiba_opal.table import CopyTableCommand, BackupViewService, RestoreViewService -from obiba_opal.dictionary import DictionaryService, ExportAnnotationsService, ImportAnnotationsService +from obiba_opal.dictionary import ( + DictionaryService, + ExportAnnotationsService, + ImportAnnotationsService, +) from obiba_opal.data import DataService, EntityService from obiba_opal.analysis import AnalysisCommand, ExportAnalysisService from obiba_opal.file import FileService -from obiba_opal.exports import ExportPluginCommand, ExportCSVCommand, ExportXMLCommand, ExportRSASCommand, ExportRSPSSCommand, ExportRSTATACommand, ExportRDSCommand, ExportSQLCommand, ExportVCFCommand +from obiba_opal.exports import ( + ExportPluginCommand, + ExportCSVCommand, + ExportXMLCommand, + ExportRSASCommand, + ExportRSPSSCommand, + ExportRSTATACommand, + ExportRDSCommand, + ExportSQLCommand, + ExportVCFCommand, +) from obiba_opal.subjects import UserService, GroupService -from obiba_opal.perm import ProjectPermService, DatasourcePermService, TablePermService, VariablePermService, ResourcePermService, ResourcesPermService, RPermService, DataSHIELDPermService, SystemPermService -from obiba_opal.imports import ImportPluginCommand, ImportCSVCommand, ImportIDMapService, ImportIDService, ImportLimeSurveyCommand, ImportOpalCommand, ImportRDSCommand, ImportRSASCommand, ImportRSPSSCommand, ImportRSTATACommand, ImportSQLCommand, ImportVCFCommand, ImportXMLCommand -from obiba_opal.system import PluginService, SystemService, TaxonomyService, TaskService, RESTService +from obiba_opal.perm import ( + ProjectPermService, + DatasourcePermService, + TablePermService, + VariablePermService, + ResourcePermService, + ResourcesPermService, + RPermService, + DataSHIELDPermService, + SystemPermService, +) +from obiba_opal.imports import ( + ImportPluginCommand, + ImportCSVCommand, + ImportIDMapService, + ImportIDService, + ImportLimeSurveyCommand, + ImportOpalCommand, + ImportRDSCommand, + ImportRSASCommand, + ImportRSPSSCommand, + ImportRSTATACommand, + ImportSQLCommand, + ImportVCFCommand, + ImportXMLCommand, +) +from obiba_opal.system import ( + PluginService, + SystemService, + TaxonomyService, + TaskService, + RESTService, +) from obiba_opal.sql import SQLService, SQLHistoryService from obiba_opal.security import EncryptService, DecryptService + +__all__ = [ + "UriBuilder", + "OpalClient", + "OpalRequest", + "OpalResponse", + "Formatter", + "MagmaNameResolver", + "HTTPError", + "ProjectService", + "BackupProjectCommand", + "RestoreProjectCommand", + "CopyTableCommand", + "BackupViewService", + "RestoreViewService", + "DictionaryService", + "ExportAnnotationsService", + "ImportAnnotationsService", + "DataService", + "EntityService", + "AnalysisCommand", + "ExportAnalysisService", + "FileService", + "ExportPluginCommand", + "ExportCSVCommand", + "ExportXMLCommand", + "ExportRSASCommand", + 
"ExportRSPSSCommand", + "ExportRSTATACommand", + "ExportRDSCommand", + "ExportSQLCommand", + "ExportVCFCommand", + "UserService", + "GroupService", + "ProjectPermService", + "DatasourcePermService", + "TablePermService", + "VariablePermService", + "ResourcePermService", + "ResourcesPermService", + "RPermService", + "DataSHIELDPermService", + "SystemPermService", + "ImportPluginCommand", + "ImportCSVCommand", + "ImportIDMapService", + "ImportIDService", + "ImportLimeSurveyCommand", + "ImportOpalCommand", + "ImportRDSCommand", + "ImportRSASCommand", + "ImportRSPSSCommand", + "ImportRSTATACommand", + "ImportSQLCommand", + "ImportVCFCommand", + "ImportXMLCommand", + "PluginService", + "SystemService", + "TaxonomyService", + "TaskService", + "RESTService", + "SQLService", + "SQLHistoryService", + "EncryptService", + "DecryptService", +] diff --git a/obiba_opal/analysis.py b/obiba_opal/analysis.py index ab47492..9b3f0a7 100644 --- a/obiba_opal/analysis.py +++ b/obiba_opal/analysis.py @@ -23,9 +23,19 @@ def add_arguments(self, parser): """ Add analyse command specific options """ - parser.add_argument('--project', '-pr', required=True, help='Project name') - parser.add_argument('--config', '-c', required=True, help='A local JSON file containing the analysis configuration') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument("--project", "-pr", required=True, help="Project name") + parser.add_argument( + "--config", + "-c", + required=True, + help="A local JSON file containing the analysis configuration", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(self, args): @@ -34,11 +44,13 @@ def do_command(self, args): """ client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - res = AnalysisCommand(client, args.verbose).analyse(args.project, args.config) + res = AnalysisCommand(client, args.verbose).analyse( + args.project, args.config + ) # format response core.Formatter.print_json(res, args.json) finally: - client.close + client.close() def analyse(self, project: str, config: str) -> dict: """ @@ -50,12 +62,12 @@ def analyse(self, project: str, config: str) -> dict: dto = self._create_dto(project, config) request = self.client.new_request() request.fail_on_error().accept_json().content_type_json() - ws = "/project/%s/commands/_analyse" % project + ws = f"/project/{project}/commands/_analyse" response = request.post().resource(ws).content(json.dumps(dto)).send() # get job status location = response.get_location() - job_resource = re.sub(r'http.*\/ws', r'', location) + job_resource = re.sub(r"http.*\/ws", r"", location) request = self.client.new_request() request.fail_on_error().accept_json() if self.verbose: @@ -67,12 +79,13 @@ def _create_dto(self, project, config): """ Create an analysis option DTO """ - dto = {'project': project} - configJson = json.loads(open(config, 'r').read()) - if type(configJson) is list: - dto['analyses'] = configJson + dto = {"project": project} + with open(config) as f: + configJson = json.load(f) + if isinstance(configJson, list): + dto["analyses"] = configJson else: - dto['analyses'] = [configJson] + dto["analyses"] = [configJson] return dto @@ -90,13 +103,30 @@ def add_arguments(self, parser): """ Add export analysis command specific options """ - parser.add_argument('--project', '-pr', required=True, - help='Project name for which analysis data will be exported.') - 
parser.add_argument('--table', '-t', required=False, help='Table name for which analysis data will be exported.') - parser.add_argument('--all-results', '-ar', action='store_true', - help='Export all results (default exports last result).') - parser.add_argument('--analysis-id', '-ai', required=False, - help='A table Analysis ID for which results will be exported.') + parser.add_argument( + "--project", + "-pr", + required=True, + help="Project name for which analysis data will be exported.", + ) + parser.add_argument( + "--table", + "-t", + required=False, + help="Table name for which analysis data will be exported.", + ) + parser.add_argument( + "--all-results", + "-ar", + action="store_true", + help="Export all results (default exports last result).", + ) + parser.add_argument( + "--analysis-id", + "-ai", + required=False, + help="A table Analysis ID for which results will be exported.", + ) @classmethod def do_command(self, args): @@ -107,12 +137,18 @@ def do_command(self, args): client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: fd = sys.stdout.fileno() - if args.table is None: - ExportAnalysisService(client, args.verbose).export_project_analyses(args.project, fd, args.all_results) + if args.table is None: + ExportAnalysisService(client, args.verbose).export_project_analyses( + args.project, fd, args.all_results + ) elif args.analysis_id is None: - ExportAnalysisService(client, args.verbose).export_table_analyses(args.project, args.table, fd, args.all_results) + ExportAnalysisService(client, args.verbose).export_table_analyses( + args.project, args.table, fd, args.all_results + ) else: - ExportAnalysisService(client, args.verbose).export_table_analysis(args.project, args.table, args.analysis_id, fd, args.all_results) + ExportAnalysisService(client, args.verbose).export_table_analysis( + args.project, args.table, args.analysis_id, fd, args.all_results + ) finally: client.close() @@ -126,11 +162,13 @@ def export_project_analyses(self, project: str, fd, all_results: bool = True): """ request = self.client.new_request() request.fail_on_error().accept("application/zip") - fp = os.fdopen(fd, 'wb') + fp = os.fdopen(fd, "wb") request.get().resource(self._make_ws(project, all_results=all_results)).send(fp) fp.flush() - def export_table_analyses(self, project: str, table: str, fd, all_results: bool = True): + def export_table_analyses( + self, project: str, table: str, fd, all_results: bool = True + ): """ Export project's analyses for a specific table in a zip file. @@ -140,11 +178,15 @@ def export_table_analyses(self, project: str, table: str, fd, all_results: bool """ request = self.client.new_request() request.fail_on_error().accept("application/zip") - fp = os.fdopen(fd, 'wb') - request.get().resource(self._make_ws(project, table, all_results=all_results)).send() + fp = os.fdopen(fd, "wb") + request.get().resource( + self._make_ws(project, table, all_results=all_results) + ).send() fp.flush() - def export_table_analysis(self, project: str, table: str, analysis_id: str, fd, all_results: bool = True): + def export_table_analysis( + self, project: str, table: str, analysis_id: str, fd, all_results: bool = True + ): """ Export project's analysis for a specific table and analyis in a zip file. 
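A minimal sketch of the analysis export helpers reformatted above (illustrative only: the project name and output file are placeholders; the (client, verbose) constructor is taken from the do_command() call shown above):

# Minimal sketch: export all analyses of a project into a local zip archive.
import os
from obiba_opal import OpalClient, ExportAnalysisService

client = OpalClient.buildWithAuthentication(
    server="https://opal.example.org", user="administrator", password="password"
)
try:
    # export_project_analyses() streams a zip archive to the given file descriptor.
    fd = os.open("analyses.zip", os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
    ExportAnalysisService(client, False).export_project_analyses(
        "myproject", fd, all_results=True
    )
finally:
    client.close()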
@@ -154,20 +196,28 @@ def export_table_analysis(self, project: str, table: str, analysis_id: str, fd, :param fd: Destination file descriptor (see os.fdopen()) """ request = self.client.new_request() - request.fail_on_error().accept('application/zip') - fp = os.fdopen(fd, 'wb') - request.get().resource(self._make_ws(project, table, analysis_id, all_results)).send() + request.fail_on_error().accept("application/zip") + fp = os.fdopen(fd, "wb") + request.get().resource( + self._make_ws(project, table, analysis_id, all_results) + ).send() fp.flush() - def _make_ws(self, project: str, table: str = None, analysis_id: str = None, all_results: bool = True): + def _make_ws( + self, + project: str, + table: str = None, + analysis_id: str = None, + all_results: bool = True, + ): """ Build the web service resource path """ if table is None: - ws = '/project/%s/analyses/_export' % project + ws = f"/project/{project}/analyses/_export" elif analysis_id is None: - ws = '/project/%s/table/%s/analyses/_export' % (project, table) + ws = f"/project/{project}/table/{table}/analyses/_export" else: - ws = '/project/%s/table/%s/analysis/%s/_export' % (project, table, analysis_id) + ws = f"/project/{project}/table/{table}/analysis/{analysis_id}/_export" - return '%s?all=true' % ws if all_results else ws + return f"{ws}?all=true" if all_results else ws diff --git a/obiba_opal/core.py b/obiba_opal/core.py index 6ac09ee..28fc805 100755 --- a/obiba_opal/core.py +++ b/obiba_opal/core.py @@ -67,12 +67,13 @@ def buildWithCertificate(cls, server, cert, key, no_ssl_verify: bool = False): :param server - Opal server address :param cert - public certificate/key (must be named as 'publickey.pem') :param key - private key (must be named as 'privatekey.pem') - :param no_ssl_verify - if True, the SSL certificate is not verified (not recommended) + :param no_ssl_verify - if True, the SSL certificate is not verified + (not recommended) """ client = cls(server) if client.base_url.startswith("https:"): - client.session.verify = False if no_ssl_verify else True + client.session.verify = not no_ssl_verify client.session.cert = (cert, key) return client @@ -87,7 +88,8 @@ def buildWithAuthentication( :param server - Opal server address :param user - username :param password - user password - :param no_ssl_verify - if True, the SSL certificate is not verified (not recommended) + :param no_ssl_verify - if True, the SSL certificate is not verified + (not recommended) """ client = cls(server) if client.base_url.startswith("https:"): @@ -114,26 +116,25 @@ def buildWithToken(cls, server, token, no_ssl_verify: bool = False): :param server - Opal server address :param token - token key - :param no_ssl_verify - if True, the SSL certificate is not verified (not recommended) + :param no_ssl_verify - if True, the SSL certificate is not verified + (not recommended) """ client = cls(server) if client.base_url.startswith("https:"): - client.session.verify = False if no_ssl_verify else True + client.session.verify = not no_ssl_verify client.token(token) return client def __ensure_entry(self, text, entry, pwd=False): e = entry if not entry: - if pwd: - e = getpass.getpass(prompt=text + ": ") - else: - e = input(text + ": ") + e = getpass.getpass(prompt=text + ": ") if pwd else input(text + ": ") return e def credentials(self, user, password): """ - Creates the authorization header and attempts to input the required user/password + Creates the authorization header and attempts to input the required + user/password :param user - username :param password - 
user password @@ -142,8 +143,7 @@ def credentials(self, user, password): p = self.__ensure_entry("Password", password, True) return self.header( "Authorization", - "Basic " - + base64.b64encode("{}:{}".format(u, p).encode("utf-8")).decode("utf-8"), + "Basic " + base64.b64encode(f"{u}:{p}".encode()).decode("utf-8"), ) def token(self, token): @@ -178,7 +178,8 @@ def verify(self, value): """ Ignore or validate certificate - :param value = True/False to validation or not. Value can also be a CA_BUNDLE file or directory (e.g. 'verify=/etc/ssl/certs/ca-certificates.crt') + :param value = True/False to validation or not. Value can also be a + CA_BUNDLE file or directory (e.g. 'verify=/etc/ssl/certs/ca-certificates.crt') """ self.session.verify = value return self @@ -205,7 +206,7 @@ def close(self): try: self.new_request().resource("/auth/session/_current").delete().send() self.session.close() - except Exception as e: + except Exception: pass self.id = None @@ -238,21 +239,18 @@ def parse(cls, args): data["key"] = argv["ssl_key"] else: raise ValueError( - "Invalid login information. Requires user-password or token or certificate-key information" + "Invalid login information. Requires user-password or token " + "or certificate-key information" ) - setattr(cls, "data", data) + cls.data = data return cls() def isToken(self): - if self.data.keys() & {"token"}: - return True - return False + return bool(self.data.keys() & {"token"}) def isSsl(self): - if self.data.keys() & {"cert", "key"}: - return True - return False + return bool(self.data.keys() & {"cert", "key"}) class OpalRequest: @@ -276,7 +274,8 @@ def __init__(self, opal_client): def timeout(self, value): """ Sets the connection and read timeout - Note: value can be a tupple to have different timeouts for connection and reading (connTimout, readTimeout) + Note: value can be a tupple to have different timeouts for connection + and reading (connTimout, readTimeout) :param value - connection/read timout """ @@ -413,10 +412,6 @@ def __build_request(self): else: raise ValueError("Resource is missing") - if self._upload_file is not None: - # Open file here, will be closed in send() method - self.files = {"file": (os.path.basename(self._upload_file), open(self._upload_file, "rb"))} - if self.files is not None: request.files = self.files @@ -429,8 +424,27 @@ def send(self, fp=None): """ Sends the request via client session object """ - request = self.__build_request() - try: + # Handle file upload with context manager + if self._upload_file is not None: + with open(self._upload_file, "rb") as file_handle: + self.files = { + "file": ( + os.path.basename(self._upload_file), + file_handle, + ) + } + request = self.__build_request() + response = OpalResponse(self.client.session.send(request.prepare())) + + if self._fail_on_error and response.code >= 400: + raise HTTPError(response) + + if fp is not None: + fp.write(response.content) + + return response + else: + request = self.__build_request() response = OpalResponse(self.client.session.send(request.prepare())) if self._fail_on_error and response.code >= 400: @@ -440,12 +454,6 @@ def send(self, fp=None): fp.write(response.content) return response - finally: - # Close file handle if it was opened for upload - if self.files is not None and "file" in self.files: - file_tuple = self.files["file"] - if len(file_tuple) > 1 and hasattr(file_tuple[1], "close"): - file_tuple[1].close() class OpalResponse: @@ -453,7 +461,9 @@ class OpalResponse: Response from Opal: code, headers and content """ - def 
__init__(self, response: Response = Response()): + def __init__(self, response: Response = None): + if response is None: + response = Response() self.response = response @property @@ -474,8 +484,8 @@ def from_json(self): else: try: return self.response.json() - except Exception as e: - if type(self.content) == str: + except Exception: + if isinstance(self.content, str): return self.response.content else: # FIXME silently fail @@ -497,7 +507,7 @@ def get_location(self): def extract_cookie_value(self, name: str) -> str | None: if "set-cookie" in self.response.headers: - if type(self.response.headers["set-cookie"]) == str: + if isinstance(self.response.headers["set-cookie"], str): return self._extract_cookie_single_value( name, self.response.headers["set-cookie"] ) @@ -552,28 +562,19 @@ def is_datasources(self): return self.datasource is None or self.datasource == "*" def is_datasource(self): - if self.table: - return False - else: - return True + return not self.table def is_tables(self): return self.table == "*" def is_table(self): - if self.table and not self.variable: - return True - else: - return False + return bool(self.table and not self.variable) def is_variables(self): return self.variable == "*" def is_variable(self): - if self.variable: - return True - else: - return False + return bool(self.variable) def get_ws(self): if self.is_datasources(): @@ -615,9 +616,9 @@ class UriBuilder: Build a valid Uri. """ - def __init__(self, path=[], params={}): - self._path = path - self._params = params + def __init__(self, path=None, params=None): + self._path = path if path is not None else [] + self._params = params if params is not None else {} def path(self, path): self._path = path @@ -632,29 +633,35 @@ def params(self, params): return self def query(self, key, value): - val = "%s" % value - if type(value) == bool: + val = f"{value}" + if isinstance(value, bool): val = val.lower() - self._params.update([(key, val), ]) + self._params.update( + [ + (key, val), + ] + ) return self def __str__(self): def concat_segment(p, s): - return "%s/%s" % (p, s) + return f"{p}/{s}" def concat_params(k): - return "%s=%s" % ( + return "{}={}".format( urllib.parse.quote(k), urllib.parse.quote(str(self._params[k])), ) def concat_query(q, p): - return "%s&%s" % (q, p) + return f"{q}&{p}" p = urllib.parse.quote("/" + reduce(concat_segment, self._path)) if len(self._params): - q = reduce(concat_query, list(map(concat_params, list(self._params.keys())))) - return "%s?%s" % (p, q) + q = reduce( + concat_query, list(map(concat_params, list(self._params.keys()))) + ) + return f"{p}?{q}" else: return p @@ -665,13 +672,11 @@ def build(self): class HTTPError(Exception): def __init__(self, response: OpalResponse, message: str = None): # Call the base class constructor with the parameters it needs - super().__init__(message if message else "HTTP Error: %s" % response.code) + super().__init__(message if message else f"HTTP Error: {response.code}") self.code = response.code http_status = [x for x in list(HTTPStatus) if x.value == response.code][0] self.message = ( - message - if message - else "%s: %s" % (http_status.phrase, http_status.description) + message if message else f"{http_status.phrase}: {http_status.description}" ) self.error = ( response.from_json() @@ -679,7 +684,7 @@ def __init__(self, response: OpalResponse, message: str = None): else {"code": response.code, "status": self.message} ) # case the reported error is not a dict - if type(self.error) != dict: + if not isinstance(self.error, dict): self.error = 
{"code": response.code, "status": self.error} def is_client_error(self) -> bool: diff --git a/obiba_opal/imports.py b/obiba_opal/imports.py index 033b4da..4ff9c39 100644 --- a/obiba_opal/imports.py +++ b/obiba_opal/imports.py @@ -7,6 +7,7 @@ import sys import json + class ImportPluginCommand: """ Import from a plugin. @@ -21,8 +22,16 @@ def add_arguments(cls, parser): """ Add import command specific options """ - parser.add_argument('--name', '-n', required=True, help='Opal datasource plugin name') - parser.add_argument('--config', '-c', required=False, help='A JSON file containing the import configuration. If not provided, the plugin will apply default values (or will fail).') + parser.add_argument( + "--name", "-n", required=True, help="Opal datasource plugin name" + ) + parser.add_argument( + "--config", + "-c", + required=False, + help="A JSON file containing the import configuration. If not " + "provided, the plugin will apply default values (or will fail).", + ) # non specific import arguments io.add_import_arguments(parser) @@ -35,17 +44,39 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - config = json.loads(open(args.config).read()) - res = cls(client, args.verbose).import_data(args.name, config, args.destination, args.tables, args.incremental, args.limit, args.identifiers, args.policy, args.merge) + with open(args.config) as f: + config = json.loads(f.read()) + res = cls(client, args.verbose).import_data( + args.name, + config, + args.destination, + args.tables, + args.incremental, + args.limit, + args.identifiers, + args.policy, + args.merge, + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - - def import_data(self, name: str, config: dict, destination: str, tables: list = None, incremental: bool = None, limit: int = None, identifiers: str = None, policy: str = None, merge: bool = None) -> dict: + + def import_data( + self, + name: str, + config: dict, + destination: str, + tables: list = None, + incremental: bool = None, + limit: int = None, + identifiers: str = None, + policy: str = None, + merge: bool = None, + ) -> dict: """ Import tables using a plugin. - + :param name: The plugin name :param config: The plugin configuration :param destination: The destination project @@ -53,13 +84,24 @@ def import_data(self, name: str, config: dict, destination: str, tables: list = :param incremental: Incremental import (new and updated value sets) :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping - :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) - """ - importer = io.OpalImporter.build(client=self.client, destination=destination, tables=tables, - incremental=incremental, limit=limit, - identifiers=identifiers, - policy=policy, merge=merge, verbose=self.verbose) + :param policy: The ID mapping policy: "required" (each identifiers must + be mapped prior importation, default), "ignore" (ignore + unknown identifiers), "generate" (generate a system + identifier for each unknown identifier) + :param merge: Merge imported data dictionary with the destination one + (default is false, i.e. 
data dictionary is overridden) + """ + importer = io.OpalImporter.build( + client=self.client, + destination=destination, + tables=tables, + incremental=incremental, + limit=limit, + identifiers=identifiers, + policy=policy, + merge=merge, + verbose=self.verbose, + ) extension_factory = self.OpalExtensionFactory(name, config) response = importer.submit(extension_factory) @@ -77,11 +119,11 @@ def add(self, factory): extension = {} if self.name: - extension['name'] = self.name + extension["name"] = self.name if self.config: - extension['parameters'] = json.dumps(self.config) + extension["parameters"] = json.dumps(self.config) - factory['Magma.PluginDatasourceFactoryDto.params'] = extension + factory["Magma.PluginDatasourceFactoryDto.params"] = extension class ImportCSVCommand: @@ -98,14 +140,34 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--path', '-pa', required=True, help='CSV file to import from the Opal filesystem.') - parser.add_argument('--characterSet', '-c', required=False, help='Character set.') - parser.add_argument('--separator', '-s', required=False, help='Field separator.') - parser.add_argument('--quote', '-q', required=False, help='Quotation mark character.') - parser.add_argument('--firstRow', '-f', type=int, required=False, help='From row.') - parser.add_argument('--valueType', '-vt', required=False, - help='Default value type (text, integer, decimal, boolean etc.). When not specified, "text" is the default.') - parser.add_argument('--type', '-ty', required=True, help='Entity type (e.g. Participant)') + parser.add_argument( + "--path", + "-pa", + required=True, + help="CSV file to import from the Opal filesystem.", + ) + parser.add_argument( + "--characterSet", "-c", required=False, help="Character set." + ) + parser.add_argument( + "--separator", "-s", required=False, help="Field separator." + ) + parser.add_argument( + "--quote", "-q", required=False, help="Quotation mark character." + ) + parser.add_argument( + "--firstRow", "-f", type=int, required=False, help="From row." + ) + parser.add_argument( + "--valueType", + "-vt", + required=False, + help="Default value type (text, integer, decimal, boolean etc.). " + 'When not specified, "text" is the default.', + ) + parser.add_argument( + "--type", "-ty", required=True, help="Entity type (e.g. 
Participant)" + ) # non specific import arguments io.add_import_arguments(parser) @@ -119,48 +181,98 @@ def do_command(cls, args): client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: res = cls(client, args.verbose).import_data( - args.path, args.destination, args.characterSet, args.separator, args.quote, args.firstRow, args.valueType, args.type, - args.tables, args.incremental, args.limit, args.identifiers, args.policy, args.merge) + args.path, + args.destination, + args.characterSet, + args.separator, + args.quote, + args.firstRow, + args.valueType, + args.type, + args.tables, + args.incremental, + args.limit, + args.identifiers, + args.policy, + args.merge, + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def import_data(self, path: str, destination: str, characterSet: str = None, separator: str = None, quote: str = None, firstRow: int = None, valueType: str = None, type: str = None, - tables: list = None, incremental: bool = None, limit: int = None, identifiers: str = None, policy: str = None, merge: bool = None) -> dict: + def import_data( + self, + path: str, + destination: str, + characterSet: str = None, + separator: str = None, + quote: str = None, + firstRow: int = None, + valueType: str = None, + type: str = None, + tables: list = None, + incremental: bool = None, + limit: int = None, + identifiers: str = None, + policy: str = None, + merge: bool = None, + ) -> dict: """ Import tables from a CSV file. - + :param characterSet: The cheracter set :param separator: The separator char :param quote: The quote char :param firstRow: From row :param path: File to import in Opal file system - :param valueType: Default value type (text, integer, decimal, boolean etc.). When not specified, "text" is the default - :param type: Entity type (e.g. Participant) - :param destination: The destination project - :param tables: The tables names to be imported (default is all) - :param incremental: Incremental import (new and updated value sets) - :param limit: Import limit (maximum number of value sets) - :param identifiers: The name of the ID mapping - :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) - """ - importer = io.OpalImporter.build(self.client, destination=destination, tables=tables, - incremental=incremental, limit=limit, - identifiers=identifiers, - policy=policy, merge=merge, verbose=self.verbose) - extension_factory = self.OpalExtensionFactory(characterSet=characterSet, separator=separator, - quote=quote, - firstRow=firstRow, path=path, valueType=valueType, - type=type, - tables=tables, - destination=destination) + :param valueType: Default value type (text, integer, decimal, boolean + etc.). When not specified, "text" is the default + :param policy: The ID mapping policy: "required" (each identifiers must + be mapped prior importation, default), "ignore" (ignore + unknown identifiers), "generate" (generate a system + identifier for each unknown identifier) + :param merge: Merge imported data dictionary with the destination one + (default is false, i.e. 
data dictionary is overridden) + """ + importer = io.OpalImporter.build( + self.client, + destination=destination, + tables=tables, + incremental=incremental, + limit=limit, + identifiers=identifiers, + policy=policy, + merge=merge, + verbose=self.verbose, + ) + extension_factory = self.OpalExtensionFactory( + characterSet=characterSet, + separator=separator, + quote=quote, + firstRow=firstRow, + path=path, + valueType=valueType, + type=type, + tables=tables, + destination=destination, + ) response = importer.submit(extension_factory) return response.from_json() class OpalExtensionFactory(io.OpalImporter.ExtensionFactoryInterface): - def __init__(self, characterSet, separator, quote, firstRow, path, valueType, type, tables, destination): + def __init__( + self, + characterSet, + separator, + quote, + firstRow, + path, + valueType, + type, + tables, + destination, + ): self.characterSet = characterSet self.separator = separator self.quote = quote @@ -178,41 +290,39 @@ def add(self, factory): csv_factory = {} if self.characterSet: - csv_factory['characterSet'] = self.characterSet + csv_factory["characterSet"] = self.characterSet if self.separator: - csv_factory['separator'] = self.separator + csv_factory["separator"] = self.separator if self.quote: - csv_factory['quote'] = self.quote + csv_factory["quote"] = self.quote if self.firstRow: - csv_factory['firstRow'] = self.firstRow + csv_factory["firstRow"] = self.firstRow if self.valueType: - csv_factory['defaultValueType'] = self.valueType + csv_factory["defaultValueType"] = self.valueType - table = { - 'data': self.path, - 'entityType': self.type - } + table = {"data": self.path, "entityType": self.type} if self.tables: - table['name'] = self.tables[0] + table["name"] = self.tables[0] else: # Take filename as the table name name = self.path.split("/") - index = name[-1].find('.csv') + index = name[-1].find(".csv") if index > 0: - table['name'] = name[-1][:index] + table["name"] = name[-1][:index] else: - table['name'] = name[-1] - table['refTable'] = self.destination + "." + table['name'] + table["name"] = name[-1] + table["refTable"] = self.destination + "." 
+ table["name"] - csv_factory['tables'] = [table] + csv_factory["tables"] = [table] + + factory["Magma.CsvDatasourceFactoryDto.params"] = csv_factory - factory['Magma.CsvDatasourceFactoryDto.params'] = csv_factory class ImportLimeSurveyCommand: """ @@ -228,11 +338,39 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--url', '-ur', required=False, help='LimeSurvey SQL database JDBC url (if not provided, plugin defaults will be used).') - parser.add_argument('--uname', '-un', required=False, help='LimeSurvey SQL database user name (if not provided, plugin defaults will be used).') - parser.add_argument('--pword', '-pwd', required=False, help='LimeSurvey SQL database user password (if not provided, plugin defaults will be used).') - parser.add_argument('--prefix', '-pr', required=False, help='Table prefix (if not provided, plugin defaults will be used).') - parser.add_argument('--properties', '-pp', required=False, help='SQL properties (if not provided, plugin defaults will be used).') + parser.add_argument( + "--url", + "-ur", + required=False, + help="LimeSurvey SQL database JDBC url (if not provided, plugin " + "defaults will be used).", + ) + parser.add_argument( + "--uname", + "-un", + required=False, + help="LimeSurvey SQL database user name (if not provided, plugin " + "defaults will be used).", + ) + parser.add_argument( + "--pword", + "-pwd", + required=False, + help="LimeSurvey SQL database user password (if not provided, " + "plugin defaults will be used).", + ) + parser.add_argument( + "--prefix", + "-pr", + required=False, + help="Table prefix (if not provided, plugin defaults will be used).", + ) + parser.add_argument( + "--properties", + "-pp", + required=False, + help="SQL properties (if not provided, plugin defaults will be used).", + ) # non specific import arguments io.add_import_arguments(parser) @@ -246,18 +384,42 @@ def do_command(cls, args): client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: res = cls(client, args.verbose).import_data( - args.url, args.uname, args.pword, args.prefix, args.properties, - args.destination, args.tables, args.incremental, args.limit, args.identifiers, args.policy, args.merge) + args.url, + args.uname, + args.pword, + args.prefix, + args.properties, + args.destination, + args.tables, + args.incremental, + args.limit, + args.identifiers, + args.policy, + args.merge, + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - - def import_data(self, url: str, uname: str, pword: str, prefix: str, properties: str, - destination: str, tables: list = None, incremental: bool = None, limit: int = None, identifiers: str = None, policy: str = None, merge: bool = None) -> dict: + + def import_data( + self, + url: str, + uname: str, + pword: str, + prefix: str, + properties: str, + destination: str, + tables: list = None, + incremental: bool = None, + limit: int = None, + identifiers: str = None, + policy: str = None, + merge: bool = None, + ) -> dict: """ Import tables from a LimeSurvey database. 
- + :param url: LimeSurvey SQL database JDBC url (if not provided, plugin defaults will be used) :param uname: LimeSurvey SQL database user name (if not provided, plugin defaults will be used) :param pword: LimeSurvey SQL database user password (if not provided, plugin defaults will be used) @@ -269,13 +431,22 @@ def import_data(self, url: str, uname: str, pword: str, prefix: str, properties: :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) - """ - importer = io.OpalImporter.build(self.client, destination=destination, tables=tables, - incremental=incremental, limit=limit, - identifiers=identifiers, - policy=policy, merge=merge, verbose=self.verbose) - extension_factory = self.OpalExtensionFactory(url, uname, pword, prefix, properties) + :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) + """ + importer = io.OpalImporter.build( + self.client, + destination=destination, + tables=tables, + incremental=incremental, + limit=limit, + identifiers=identifiers, + policy=policy, + merge=merge, + verbose=self.verbose, + ) + extension_factory = self.OpalExtensionFactory( + url, uname, pword, prefix, properties + ) response = importer.submit(extension_factory) return response.from_json() @@ -293,22 +464,23 @@ def add(self, factory): """ extension = {} - extension['name'] = 'opal-datasource-limesurvey' + extension["name"] = "opal-datasource-limesurvey" config = {} if self.url: - config['url'] = self.url + config["url"] = self.url if self.uname: - config['username'] = self.uname + config["username"] = self.uname if self.pword: - config['password'] = self.pword + config["password"] = self.pword if self.prefix: - config['prefix'] = self.prefix + config["prefix"] = self.prefix if self.properties: - config['properties'] = self.properties - extension['parameters'] = json.dumps(config) + config["properties"] = self.properties + extension["parameters"] = json.dumps(config) + + factory["Magma.PluginDatasourceFactoryDto.params"] = extension - factory['Magma.PluginDatasourceFactoryDto.params'] = extension class ImportOpalCommand: """ @@ -324,12 +496,30 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--ropal', '-ro', required=True, help='Remote Opal server base url') - parser.add_argument('--ruser', '-ru', required=False, help='Remote user name (exclusive from using token)') - parser.add_argument('--rpassword', '-rp', required=False, help='Remote user password (exclusive from using token)') - parser.add_argument('--rtoken', '-rt', required=False, - help='Remote personal access token (exclusive from user credentials)') - parser.add_argument('--rdatasource', '-rd', required=True, help='Remote datasource name') + parser.add_argument( + "--ropal", "-ro", required=True, help="Remote Opal server base url" + ) + parser.add_argument( + "--ruser", + "-ru", + required=False, + help="Remote user name (exclusive from using token)", + ) + parser.add_argument( + "--rpassword", + "-rp", + required=False, + help="Remote user password (exclusive from using token)", + ) + parser.add_argument( + "--rtoken", + 
"-rt", + required=False, + help="Remote personal access token (exclusive from user credentials)", + ) + parser.add_argument( + "--rdatasource", "-rd", required=True, help="Remote datasource name" + ) # non specific import arguments io.add_import_arguments(parser) @@ -339,23 +529,49 @@ def do_command(cls, args): Execute import data command """ if (args.rtoken and args.ruser) or (not args.rtoken and not args.ruser): - raise ValueError('Either specify token OR user credentials (user name and password)') + raise ValueError( + "Either specify token OR user credentials (user name and password)" + ) # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: res = cls(client, args.vebose).import_data( - args.ropal, args.rdatasource, args.ruser, args.rpassword, args.rtoken, - args.destination, args.tables, args.incremental, args.limit, args.identifiers, args.policy, args.merge) + args.ropal, + args.rdatasource, + args.ruser, + args.rpassword, + args.rtoken, + args.destination, + args.tables, + args.incremental, + args.limit, + args.identifiers, + args.policy, + args.merge, + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - - def import_data(self, ropal: str, rdatasource: str, ruser: str, rpassword: str, rtoken: str, - destination: str, tables: list = None, incremental: bool = None, limit: int = None, identifiers: str = None, policy: str = None, merge: bool = None): + + def import_data( + self, + ropal: str, + rdatasource: str, + ruser: str, + rpassword: str, + rtoken: str, + destination: str, + tables: list = None, + incremental: bool = None, + limit: int = None, + identifiers: str = None, + policy: str = None, + merge: bool = None, + ): """ Import tables from a Opal server. - + :param ropal: Remote Opal server base url :param rdatasource: Remote project's datasource name :param ruser: Remote user name (exclusive from using token) @@ -367,14 +583,23 @@ def import_data(self, ropal: str, rdatasource: str, ruser: str, rpassword: str, :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) - """ - importer = io.OpalImporter.build(self.client, destination=destination, tables=tables, - incremental=incremental, limit=limit, - identifiers=identifiers, - policy=policy, merge=merge, verbose=self.verbose) + :param merge: Merge imported data dictionary with the destination one (default is false, i.e. 
data dictionary is overridden) + """ + importer = io.OpalImporter.build( + self.client, + destination=destination, + tables=tables, + incremental=incremental, + limit=limit, + identifiers=identifiers, + policy=policy, + merge=merge, + verbose=self.verbose, + ) # remote opal client factory - extension_factory = self.OpalExtensionFactory(ropal, rdatasource, ruser, rpassword, rtoken) + extension_factory = self.OpalExtensionFactory( + ropal, rdatasource, ruser, rpassword, rtoken + ) response = importer.submit(extension_factory) return response.from_json() @@ -390,17 +615,14 @@ def add(self, factory): """ Add specific datasource factory extension """ - rest_factory = { - 'remoteDatasource': self.rdatasource, - 'url': self.ropal - } + rest_factory = {"remoteDatasource": self.rdatasource, "url": self.ropal} if self.rtoken: - rest_factory['token'] = self.rtoken + rest_factory["token"] = self.rtoken else: - rest_factory['username'] = self.ruser - rest_factory['password'] = self.rpassword + rest_factory["username"] = self.ruser + rest_factory["password"] = self.rpassword - factory['Magma.RestDatasourceFactoryDto.params'] = rest_factory + factory["Magma.RestDatasourceFactoryDto.params"] = rest_factory class ImportRDSCommand: @@ -417,10 +639,21 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--path', '-pa', required=True, help='RDS file to import from the Opal filesystem.') - parser.add_argument('--type', '-ty', required=False, help='Entity type (e.g. Participant)') - parser.add_argument('--idVariable', '-iv', required=False, - help='R tibble column that provides the entity ID. If not specified, first column values are considered to be the entity identifiers.') + parser.add_argument( + "--path", + "-pa", + required=True, + help="RDS file to import from the Opal filesystem.", + ) + parser.add_argument( + "--type", "-ty", required=False, help="Entity type (e.g. Participant)" + ) + parser.add_argument( + "--idVariable", + "-iv", + required=False, + help="R tibble column that provides the entity ID. 
If not specified, first column values are considered to be the entity identifiers.", + ) # non specific import arguments io.add_import_arguments(parser) @@ -432,24 +665,44 @@ def do_command(cls, args): """ # Build and send request # Check input filename extension - if not (args.path.endswith('.rds')): - raise Exception('Input must be a RDS file (.rds).') + if not (args.path.endswith(".rds")): + raise Exception("Input must be a RDS file (.rds).") client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: res = cls(client, args.verbose).import_data( - args.path, args.destination, args.type, args.idVariable, - args.tables, args.incremental, args.limit, args.identifiers, args.policy, args.merge) + args.path, + args.destination, + args.type, + args.idVariable, + args.tables, + args.incremental, + args.limit, + args.identifiers, + args.policy, + args.merge, + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def import_data(self, path: str, destination: str, entityType: str = None, idVariable: str = None, - tables: list = None, incremental: bool = None, limit: int = None, identifiers: str = None, policy: str = None, merge: bool = None): + def import_data( + self, + path: str, + destination: str, + entityType: str = None, + idVariable: str = None, + tables: list = None, + incremental: bool = None, + limit: int = None, + identifiers: str = None, + policy: str = None, + merge: bool = None, + ): """ Import tables from a RDS file. - + :param path: File to import in Opal file system :param entityType: Entity type (e.g. Participant) :param idVariable: R tibble column that provides the entity ID. If not specified, first column values are considered to be the entity identifiers @@ -459,12 +712,19 @@ def import_data(self, path: str, destination: str, entityType: str = None, idVar :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) - """ - importer = io.OpalImporter.build(self.client, destination=destination, tables=tables, - incremental=incremental, limit=limit, - identifiers=identifiers, - policy=policy, merge=merge, verbose=self.verbose) + :param merge: Merge imported data dictionary with the destination one (default is false, i.e. 
data dictionary is overridden) + """ + importer = io.OpalImporter.build( + self.client, + destination=destination, + tables=tables, + incremental=incremental, + limit=limit, + identifiers=identifiers, + policy=policy, + merge=merge, + verbose=self.verbose, + ) extension_factory = self.OpalExtensionFactory(path, entityType, idVariable) response = importer.submit(extension_factory) return response.from_json() @@ -480,16 +740,16 @@ def add(self, factory): Add specific datasource factory extension """ extension = { - 'file': self.path, - 'symbol': self.path[self.path.rfind("/") + 1:self.path.rfind(".")] + "file": self.path, + "symbol": self.path[self.path.rfind("/") + 1 : self.path.rfind(".")], } if self.entityType: - extension['entityType'] = self.entityType + extension["entityType"] = self.entityType if self.idVariable: - extension['idColumn'] = self.idVariable + extension["idColumn"] = self.idVariable - factory['Magma.RHavenDatasourceFactoryDto.params'] = extension + factory["Magma.RHavenDatasourceFactoryDto.params"] = extension class ImportRSASCommand: @@ -506,12 +766,24 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--path', '-pa', required=True, - help='SAS/SAS Transport file to import from the Opal filesystem.') - parser.add_argument('--locale', '-l', required=False, help='SAS file locale (e.g. fr, en...).') - parser.add_argument('--type', '-ty', required=False, help='Entity type (e.g. Participant)') - parser.add_argument('--idVariable', '-iv', required=False, - help='SAS variable that provides the entity ID. If not specified, first variable values are considered to be the entity identifiers.') + parser.add_argument( + "--path", + "-pa", + required=True, + help="SAS/SAS Transport file to import from the Opal filesystem.", + ) + parser.add_argument( + "--locale", "-l", required=False, help="SAS file locale (e.g. fr, en...)." + ) + parser.add_argument( + "--type", "-ty", required=False, help="Entity type (e.g. Participant)" + ) + parser.add_argument( + "--idVariable", + "-iv", + required=False, + help="SAS variable that provides the entity ID. 
If not specified, first variable values are considered to be the entity identifiers.", + ) # non specific import arguments io.add_import_arguments(parser) @@ -523,24 +795,46 @@ def do_command(cls, args): """ # Build and send request # Check input filename extension - if not (args.path.endswith('.sas7bdat')) and not (args.path.endswith('.xpt')): - raise Exception('Input must be a SAS file (.sas7bdat or .xpt).') + if not (args.path.endswith(".sas7bdat")) and not (args.path.endswith(".xpt")): + raise Exception("Input must be a SAS file (.sas7bdat or .xpt).") client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: res = cls(client, args.verbose).import_data( - args.path, args.destination, args.locale, args.type, args.idVariable, - args.tables, args.incremental, args.limit, args.identifiers, args.policy, args.merge) + args.path, + args.destination, + args.locale, + args.type, + args.idVariable, + args.tables, + args.incremental, + args.limit, + args.identifiers, + args.policy, + args.merge, + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def import_data(self, path: str, destination: str, locale: str = None, entityType: str = None, idVariable: str = None, - tables: list = None, incremental: bool = None, limit: int = None, identifiers: str = None, policy: str = None, merge: bool = None): + def import_data( + self, + path: str, + destination: str, + locale: str = None, + entityType: str = None, + idVariable: str = None, + tables: list = None, + incremental: bool = None, + limit: int = None, + identifiers: str = None, + policy: str = None, + merge: bool = None, + ): """ Import tables from a SAS file. - + :param path: File to import in Opal file system :param locale: SAS file locale (e.g. fr, en...) :param entityType: Entity type (e.g. Participant) @@ -551,12 +845,22 @@ def import_data(self, path: str, destination: str, locale: str = None, entityTyp :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) - """ - importer = io.OpalImporter.build(self.client, destination, tables, - incremental, limit, identifiers, - policy, merge, self.verbose) - extension_factory = self.OpalExtensionFactory(path, locale, entityType, idVariable) + :param merge: Merge imported data dictionary with the destination one (default is false, i.e. 
data dictionary is overridden) + """ + importer = io.OpalImporter.build( + self.client, + destination, + tables, + incremental, + limit, + identifiers, + policy, + merge, + self.verbose, + ) + extension_factory = self.OpalExtensionFactory( + path, locale, entityType, idVariable + ) response = importer.submit(extension_factory) return response.from_json() @@ -573,18 +877,18 @@ def add(self, factory): Add specific datasource factory extension """ extension = { - 'file': self.path, - 'symbol': self.path[self.path.rfind("/") + 1:self.path.rfind(".")] + "file": self.path, + "symbol": self.path[self.path.rfind("/") + 1 : self.path.rfind(".")], } if self.locale: - extension['locale'] = self.locale + extension["locale"] = self.locale if self.entityType: - extension['entityType'] = self.entityType + extension["entityType"] = self.entityType if self.idVariable: - extension['idColumn'] = self.idVariable + extension["idColumn"] = self.idVariable - factory['Magma.RHavenDatasourceFactoryDto.params'] = extension + factory["Magma.RHavenDatasourceFactoryDto.params"] = extension class ImportRSPSSCommand: @@ -601,12 +905,24 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--path', '-pa', required=True, - help='SPSS file, optionally compressed, to import from the Opal filesystem.') - parser.add_argument('--locale', '-l', required=False, help='SPSS file locale (e.g. fr, en...).') - parser.add_argument('--type', '-ty', required=False, help='Entity type (e.g. Participant)') - parser.add_argument('--idVariable', '-iv', required=False, - help='SPSS variable that provides the entity ID. If not specified, first variable values are considered to be the entity identifiers.') + parser.add_argument( + "--path", + "-pa", + required=True, + help="SPSS file, optionally compressed, to import from the Opal filesystem.", + ) + parser.add_argument( + "--locale", "-l", required=False, help="SPSS file locale (e.g. fr, en...)." + ) + parser.add_argument( + "--type", "-ty", required=False, help="Entity type (e.g. Participant)" + ) + parser.add_argument( + "--idVariable", + "-iv", + required=False, + help="SPSS variable that provides the entity ID. 
If not specified, first variable values are considered to be the entity identifiers.", + ) # non specific import arguments io.add_import_arguments(parser) @@ -618,24 +934,46 @@ def do_command(cls, args): """ # Build and send request # Check input filename extension - if not (args.path.endswith('.sav')) and not (args.path.endswith('.zsav')): - raise Exception('Input must be a SPSS file (.sav or .zsav).') + if not (args.path.endswith(".sav")) and not (args.path.endswith(".zsav")): + raise Exception("Input must be a SPSS file (.sav or .zsav).") client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: res = cls(client, args.verbose).import_data( - args.path, args.destination, args.locale, args.type, args.idVariable, - args.tables, args.incremental, args.limit, args.identifiers, args.policy, args.merge) + args.path, + args.destination, + args.locale, + args.type, + args.idVariable, + args.tables, + args.incremental, + args.limit, + args.identifiers, + args.policy, + args.merge, + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def import_data(self, path: str, destination: str, locale: str = None, entityType: str = None, idVariable: str = None, - tables: list = None, incremental: bool = None, limit: int = None, identifiers: str = None, policy: str = None, merge: bool = None): + def import_data( + self, + path: str, + destination: str, + locale: str = None, + entityType: str = None, + idVariable: str = None, + tables: list = None, + incremental: bool = None, + limit: int = None, + identifiers: str = None, + policy: str = None, + merge: bool = None, + ): """ Import tables from a SPSS file. - + :param path: File to import in Opal file system :param locale: SPSS file locale (e.g. fr, en...) :param entityType: Entity type (e.g. Participant) @@ -646,16 +984,26 @@ def import_data(self, path: str, destination: str, locale: str = None, entityTyp :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) - """ - importer = io.OpalImporter.build(self.client, destination, tables, - incremental, limit, identifiers, - policy, merge, self.verbose) - extension_factory = self.OpalExtensionFactory(path, locale, entityType, idVariable) + :param merge: Merge imported data dictionary with the destination one (default is false, i.e. 
data dictionary is overridden) + """ + importer = io.OpalImporter.build( + self.client, + destination, + tables, + incremental, + limit, + identifiers, + policy, + merge, + self.verbose, + ) + extension_factory = self.OpalExtensionFactory( + path, locale, entityType, idVariable + ) response = importer.submit(extension_factory) return response.from_json() - + class OpalExtensionFactory(io.OpalImporter.ExtensionFactoryInterface): def __init__(self, path, locale, entityType, idVariable): self.path = path @@ -668,18 +1016,18 @@ def add(self, factory): Add specific datasource factory extension """ extension = { - 'file': self.path, - 'symbol': self.path[self.path.rfind("/") + 1:self.path.rfind(".")] + "file": self.path, + "symbol": self.path[self.path.rfind("/") + 1 : self.path.rfind(".")], } if self.locale: - extension['locale'] = self.locale + extension["locale"] = self.locale if self.entityType: - extension['entityType'] = self.entityType + extension["entityType"] = self.entityType if self.idVariable: - extension['idColumn'] = self.idVariable + extension["idColumn"] = self.idVariable - factory['Magma.RHavenDatasourceFactoryDto.params'] = extension + factory["Magma.RHavenDatasourceFactoryDto.params"] = extension class ImportRSTATACommand: @@ -696,11 +1044,24 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--path', '-pa', required=True, help='Stata file to import from the Opal filesystem.') - parser.add_argument('--locale', '-l', required=False, help='Stata file locale (e.g. fr, en...).') - parser.add_argument('--type', '-ty', required=False, help='Entity type (e.g. Participant)') - parser.add_argument('--idVariable', '-iv', required=False, - help='Stata variable that provides the entity ID. If not specified, first variable values are considered to be the entity identifiers.') + parser.add_argument( + "--path", + "-pa", + required=True, + help="Stata file to import from the Opal filesystem.", + ) + parser.add_argument( + "--locale", "-l", required=False, help="Stata file locale (e.g. fr, en...)." + ) + parser.add_argument( + "--type", "-ty", required=False, help="Entity type (e.g. Participant)" + ) + parser.add_argument( + "--idVariable", + "-iv", + required=False, + help="Stata variable that provides the entity ID. 
If not specified, first variable values are considered to be the entity identifiers.", + ) # non specific import arguments io.add_import_arguments(parser) @@ -712,24 +1073,46 @@ def do_command(cls, args): """ # Build and send request # Check input filename extension - if not (args.path.endswith('.dta')): - raise Exception('Input must be a Stata file (.dta).') + if not (args.path.endswith(".dta")): + raise Exception("Input must be a Stata file (.dta).") client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: res = cls(client, args.verbose).import_data( - args.path, args.destination, args.locale, args.type, args.idVariable, - args.tables, args.incremental, args.limit, args.identifiers, args.policy, args.merge) + args.path, + args.destination, + args.locale, + args.type, + args.idVariable, + args.tables, + args.incremental, + args.limit, + args.identifiers, + args.policy, + args.merge, + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def import_data(self, path: str, destination: str, locale: str = None, entityType: str = None, idVariable: str = None, - tables: list = None, incremental: bool = None, limit: int = None, identifiers: str = None, policy: str = None, merge: bool = None): + def import_data( + self, + path: str, + destination: str, + locale: str = None, + entityType: str = None, + idVariable: str = None, + tables: list = None, + incremental: bool = None, + limit: int = None, + identifiers: str = None, + policy: str = None, + merge: bool = None, + ): """ Import tables from a STATA file. - + :param path: File to import in Opal file system :param locale: STATA file locale (e.g. fr, en...) :param entityType: Entity type (e.g. Participant) @@ -740,16 +1123,26 @@ def import_data(self, path: str, destination: str, locale: str = None, entityTyp :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) - """ - importer = io.OpalImporter.build(self.client, destination, tables, - incremental, limit, identifiers, - policy, merge, self.verbose) - extension_factory = self.OpalExtensionFactory(path, locale, entityType, idVariable) + :param merge: Merge imported data dictionary with the destination one (default is false, i.e. 
data dictionary is overridden) + """ + importer = io.OpalImporter.build( + self.client, + destination, + tables, + incremental, + limit, + identifiers, + policy, + merge, + self.verbose, + ) + extension_factory = self.OpalExtensionFactory( + path, locale, entityType, idVariable + ) response = importer.submit(extension_factory) return response.from_json() - + class OpalExtensionFactory(io.OpalImporter.ExtensionFactoryInterface): def __init__(self, path, locale, entityType, idVariable): self.path = path @@ -762,18 +1155,18 @@ def add(self, factory): Add specific datasource factory extension """ extension = { - 'file': self.path, - 'symbol': self.path[self.path.rfind("/") + 1:self.path.rfind(".")] + "file": self.path, + "symbol": self.path[self.path.rfind("/") + 1 : self.path.rfind(".")], } if self.locale: - extension['locale'] = self.locale + extension["locale"] = self.locale if self.entityType: - extension['entityType'] = self.entityType + extension["entityType"] = self.entityType if self.idVariable: - extension['idColumn'] = self.idVariable + extension["idColumn"] = self.idVariable - factory['Magma.RHavenDatasourceFactoryDto.params'] = extension + factory["Magma.RHavenDatasourceFactoryDto.params"] = extension class ImportSQLCommand: @@ -790,7 +1183,9 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--database', '-db', required=True, help='Name of the SQL database.') + parser.add_argument( + "--database", "-db", required=True, help="Name of the SQL database." + ) # non specific import arguments io.add_import_arguments(parser) @@ -804,17 +1199,33 @@ def do_command(cls, args): try: res = cls(client, args.verbose).import_data( args.database, - args.destination, args.tables, args.incremental, args.limit, args.identifiers, args.policy, args.merge) + args.destination, + args.tables, + args.incremental, + args.limit, + args.identifiers, + args.policy, + args.merge, + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def import_data(self, database: str, - destination: str, tables: list = None, incremental: bool = None, limit: int = None, identifiers: str = None, policy: str = None, merge: bool = None): + def import_data( + self, + database: str, + destination: str, + tables: list = None, + incremental: bool = None, + limit: int = None, + identifiers: str = None, + policy: str = None, + merge: bool = None, + ): """ Import tables from a SQL database. - + :param database: The database name as declared in Opal. See ProjectService.get_databases() for a list of databases with 'import' usage. :param destination: The destination project :param tables: The tables names to be imported (default is all) @@ -822,11 +1233,19 @@ def import_data(self, database: str, :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) - """ - importer = io.OpalImporter.build(self.client, destination, tables, - incremental, limit, identifiers, - policy, merge, self.verbose) + :param merge: Merge imported data dictionary with the destination one (default is false, i.e. 
data dictionary is overridden) + """ + importer = io.OpalImporter.build( + self.client, + destination, + tables, + incremental, + limit, + identifiers, + policy, + merge, + self.verbose, + ) extension_factory = self.OpalExtensionFactory(database) response = importer.submit(extension_factory) @@ -840,7 +1259,9 @@ def add(self, factory): """ Add specific datasource factory extension """ - factory['Magma.JdbcDatasourceFactoryDto.params'] = {'database': self.database} + factory["Magma.JdbcDatasourceFactoryDto.params"] = { + "database": self.database + } class ImportXMLCommand: @@ -857,7 +1278,12 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--path', '-pa', required=True, help='Zip of XML files to import from the Opal filesystem.') + parser.add_argument( + "--path", + "-pa", + required=True, + help="Zip of XML files to import from the Opal filesystem.", + ) # non specific import arguments io.add_import_arguments(parser) @@ -871,17 +1297,33 @@ def do_command(cls, args): try: res = cls(client, args.verbose).import_data( args.path, - args.destination, args.tables, args.incremental, args.limit, args.identifiers, args.policy, args.merge) + args.destination, + args.tables, + args.incremental, + args.limit, + args.identifiers, + args.policy, + args.merge, + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def import_data(self, path: str, - destination: str, tables: list = None, incremental: bool = None, limit: int = None, identifiers: str = None, policy: str = None, merge: bool = None): + def import_data( + self, + path: str, + destination: str, + tables: list = None, + incremental: bool = None, + limit: int = None, + identifiers: str = None, + policy: str = None, + merge: bool = None, + ): """ Import tables from a Opal archive file. - + :param path: File to import in Opal file system :param destination: The destination project :param tables: The tables names to be imported (default is all) @@ -889,11 +1331,19 @@ def import_data(self, path: str, :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) - """ - importer = io.OpalImporter.build(self.client, destination, tables, - incremental, limit, identifiers, - policy, merge, self.verbose) + :param merge: Merge imported data dictionary with the destination one (default is false, i.e. 
data dictionary is overridden) + """ + importer = io.OpalImporter.build( + self.client, + destination, + tables, + incremental, + limit, + identifiers, + policy, + merge, + self.verbose, + ) extension_factory = self.OpalExtensionFactory(path) response = importer.submit(extension_factory) @@ -907,7 +1357,8 @@ def add(self, factory): """ Add specific datasource factory extension """ - factory['Magma.FsDatasourceFactoryDto.params'] = {'file': self.path} + factory["Magma.FsDatasourceFactoryDto.params"] = {"file": self.path} + class ImportVCFCommand: """ @@ -919,10 +1370,19 @@ def add_arguments(cls, parser): """ Add command specific options """ - parser.add_argument('--project', '-pr', required=True, - help='Project name into which genotypes data will be imported') - parser.add_argument('--vcf', '-vcf', nargs='+', required=True, - help='List of VCF/BCF (optionally compressed) file paths (in Opal file system)') + parser.add_argument( + "--project", + "-pr", + required=True, + help="Project name into which genotypes data will be imported", + ) + parser.add_argument( + "--vcf", + "-vcf", + nargs="+", + required=True, + help="List of VCF/BCF (optionally compressed) file paths (in Opal file system)", + ) @classmethod def do_command(cls, args): @@ -937,12 +1397,11 @@ def do_command(cls, args): if args.verbose: request.verbose() - options = { - 'project': args.project, - 'files': args.vcf - } + options = {"project": args.project, "files": args.vcf} # send request - uri = core.UriBuilder(['project', args.project, 'commands', '_import_vcf']).build() + uri = core.UriBuilder( + ["project", args.project, "commands", "_import_vcf"] + ).build() request.resource(uri).post().content(json.dumps(options)).send() finally: client.close() @@ -960,13 +1419,15 @@ def __init__(self, client: core.OpalClient, verbose: bool = False): def __init__(self, client: core.OpalClient, verbose: bool = False): self.client = client self.verbose = verbose - + @classmethod def add_arguments(cls, parser): """ Add import_ids command specific options """ - parser.add_argument('--type', '-t', required=True, help='Entity type (e.g. Participant).') + parser.add_argument( + "--type", "-t", required=True, help="Entity type (e.g. Participant)." + ) @classmethod def do_command(cls, args): @@ -976,16 +1437,16 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - print('Enter identifiers (one identifier per line, Ctrl-D to end input):') + print("Enter identifiers (one identifier per line, Ctrl-D to end input):") ids = sys.stdin.read() - cls(client, args.verbose).import_ids(ids.split('\n'), args.type) + cls(client, args.verbose).import_ids(ids.split("\n"), args.type) finally: client.close() def import_ids(self, ids: list, type: str): """ Import a list of identifiers in the IDs database. - + :param ids: The list of identifiers :param type: Entity type (e.g. 
Participant) """ @@ -994,10 +1455,14 @@ def import_ids(self, ids: list, type: str): if self.verbose: request.verbose() request.content_type_text_plain() - request.content('\n'.join(ids)) + request.content("\n".join(ids)) # send request - uri = core.UriBuilder(['identifiers', 'mappings', 'entities', '_import']).query('type', type).build() + uri = ( + core.UriBuilder(["identifiers", "mappings", "entities", "_import"]) + .query("type", type) + .build() + ) request.post().resource(uri).send() @@ -1019,9 +1484,13 @@ def add_arguments(cls, parser): """ Add import_idsmap command specific options """ - parser.add_argument('--type', '-t', required=True, help='Entity type (e.g. Participant).') - parser.add_argument('--map', '-m', required=True, help='Mapping name.') - parser.add_argument('--separator', '-s', required=False, help='Field separator (default is ,).') + parser.add_argument( + "--type", "-t", required=True, help="Entity type (e.g. Participant)." + ) + parser.add_argument("--map", "-m", required=True, help="Mapping name.") + parser.add_argument( + "--separator", "-s", required=False, help="Field separator (default is ,)." + ) @classmethod def do_command(cls, args): @@ -1031,16 +1500,20 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - print('Enter identifiers (one identifiers mapping per line, Ctrl-D to end input):') + print( + "Enter identifiers (one identifiers mapping per line, Ctrl-D to end input):" + ) ids = sys.stdin.read() - cls(client, args.verbose).import_ids(ids.split('\n'), args.type, args.map, args.separator) + cls(client, args.verbose).import_ids( + ids.split("\n"), args.type, args.map, args.separator + ) finally: client.close() - - def import_ids(self, ids: list, type: str, map: str, separator: str = ','): + + def import_ids(self, ids: list, type: str, map: str, separator: str = ","): """ Import a list of identifiers mappings (each item is a string of separated IDs) in the IDs database. - + :param ids: The list of identifiers :param type: Entity type (e.g. 
Participant) :param map: The mapping name @@ -1053,11 +1526,13 @@ def import_ids(self, ids: list, type: str, map: str, separator: str = ','): request.verbose() request.content_type_text_plain() - request.content('\n'.join(ids)) + request.content("\n".join(ids)) # send request - builder = core.UriBuilder(['identifiers', 'mapping', map, '_import']).query('type', type) + builder = core.UriBuilder(["identifiers", "mapping", map, "_import"]).query( + "type", type + ) if separator: - builder.query('separator', separator) + builder.query("separator", separator) uri = builder.build() request.post().resource(uri).send() diff --git a/obiba_opal/io.py b/obiba_opal/io.py index 254d23d..6f71432 100644 --- a/obiba_opal/io.py +++ b/obiba_opal/io.py @@ -11,18 +11,50 @@ def add_import_arguments(parser): """ Add Default Import arguments """ - parser.add_argument('--destination', '-d', required=True, help='Destination datasource name') - parser.add_argument('--tables', '-t', nargs='+', required=False, - help='The list of tables to be imported (defaults to all)') - parser.add_argument('--incremental', '-i', action='store_true', - help='Incremental import (new and updated value sets)') - parser.add_argument('--limit', '-li', required=False, type=int, help='Import limit (maximum number of value sets)') - parser.add_argument('--identifiers', '-id', required=False, help='Name of the ID mapping') - parser.add_argument('--policy', '-po', required=False, - help='ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier)') - parser.add_argument('--merge', '-mg', action='store_true', - help='Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden).') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument( + "--destination", "-d", required=True, help="Destination datasource name" + ) + parser.add_argument( + "--tables", + "-t", + nargs="+", + required=False, + help="The list of tables to be imported (defaults to all)", + ) + parser.add_argument( + "--incremental", + "-i", + action="store_true", + help="Incremental import (new and updated value sets)", + ) + parser.add_argument( + "--limit", + "-li", + required=False, + type=int, + help="Import limit (maximum number of value sets)", + ) + parser.add_argument( + "--identifiers", "-id", required=False, help="Name of the ID mapping" + ) + parser.add_argument( + "--policy", + "-po", + required=False, + help='ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier)', + ) + parser.add_argument( + "--merge", + "-mg", + action="store_true", + help="Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden).", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) class OpalImporter: @@ -32,20 +64,32 @@ class OpalImporter: class ExtensionFactoryInterface: def add(self, factory): - raise Exception("ExtensionFactoryInterface.add() method must be implemented by a concrete class.") + raise Exception( + "ExtensionFactoryInterface.add() method must be implemented by a concrete class." 
+ ) @classmethod - def build(cls, client: core.OpalClient, destination: str, tables: list = None, incremental: bool = None, limit: int = None, identifiers: str =None, policy: str = None, - merge: bool = None, verbose: bool = False): - setattr(cls, 'client', client) - setattr(cls, 'destination', destination) - setattr(cls, 'tables', tables) - setattr(cls, 'incremental', incremental) - setattr(cls, 'limit', limit) - setattr(cls, 'identifiers', identifiers) - setattr(cls, 'policy', policy) - setattr(cls, 'merge', merge) - setattr(cls, 'verbose', verbose) + def build( + cls, + client: core.OpalClient, + destination: str, + tables: list = None, + incremental: bool = None, + limit: int = None, + identifiers: str = None, + policy: str = None, + merge: bool = None, + verbose: bool = False, + ): + cls.client = client + cls.destination = destination + cls.tables = tables + cls.incremental = incremental + cls.limit = limit + cls.identifiers = identifiers + cls.policy = policy + cls.merge = merge + cls.verbose = verbose return cls() def submit(self, extension_factory) -> core.OpalResponse: @@ -62,55 +106,62 @@ def submit(self, extension_factory) -> core.OpalResponse: request.verbose() # import options - options = {'destination': self.destination} + options = {"destination": self.destination} # tables must be the ones of the transient - tables2import = transient['table'] + tables2import = transient["table"] if self.tables: - def f(t): return any(t in s for s in transient['table']) + + def f(t): + return any(t in s for s in transient["table"]) tables2import = list(filter(f, self.tables)) def table_fullname(t): - return transient['name'] + '.' + t + return transient["name"] + "." + t - options['tables'] = list(map(table_fullname, tables2import)) + options["tables"] = list(map(table_fullname, tables2import)) if self.identifiers: - options['idConfig'] = {'name': self.identifiers} + options["idConfig"] = {"name": self.identifiers} if self.policy: - if self.policy == 'ignore': - options['idConfig']['allowIdentifierGeneration'] = False - options['idConfig']['ignoreUnknownIdentifier'] = True - elif self.policy == 'generate': - options['idConfig']['allowIdentifierGeneration'] = True - options['idConfig']['ignoreUnknownIdentifier'] = False + if self.policy == "ignore": + options["idConfig"]["allowIdentifierGeneration"] = False + options["idConfig"]["ignoreUnknownIdentifier"] = True + elif self.policy == "generate": + options["idConfig"]["allowIdentifierGeneration"] = True + options["idConfig"]["ignoreUnknownIdentifier"] = False else: - options['idConfig']['allowIdentifierGeneration'] = False - options['idConfig']['ignoreUnknownIdentifier'] = False + options["idConfig"]["allowIdentifierGeneration"] = False + options["idConfig"]["ignoreUnknownIdentifier"] = False else: - options['idConfig']['allowIdentifierGeneration'] = False - options['idConfig']['ignoreUnknownIdentifier'] = False + options["idConfig"]["allowIdentifierGeneration"] = False + options["idConfig"]["ignoreUnknownIdentifier"] = False if self.verbose: print("** Import options:") print(options) print("**") - uri = core.UriBuilder(['project', self.destination, 'commands', '_import']).build() + uri = core.UriBuilder( + ["project", self.destination, "commands", "_import"] + ).build() response = request.post().resource(uri).content(json.dumps(options)).send() # get job status location = None - if 'Location' in response.headers: - location = response.headers['Location'] - elif 'location' in response.headers: - location = response.headers['location'] - 
job_resource = re.sub(r'http.*\/ws', r'', location) + if "Location" in response.headers: + location = response.headers["Location"] + elif "location" in response.headers: + location = response.headers["location"] + job_resource = re.sub(r"http.*\/ws", r"", location) request = self.client.new_request() request.fail_on_error().accept_json() return request.get().resource(job_resource).send() - def __create_transient_datasource(self, extension_factory, ): + def __create_transient_datasource( + self, + extension_factory, + ): """ Create a transient datasource """ @@ -123,27 +174,27 @@ def __create_transient_datasource(self, extension_factory, ): # build transient datasource factory factory = {} if self.incremental: - factory['incrementalConfig'] = { - 'incremental': True, - 'incrementalDestinationName': self.destination + factory["incrementalConfig"] = { + "incremental": True, + "incrementalDestinationName": self.destination, } if self.limit: - factory['batchConfig'] = {'limit': self.limit} + factory["batchConfig"] = {"limit": self.limit} if self.identifiers: - factory['idConfig'] = {'name': self.identifiers} + factory["idConfig"] = {"name": self.identifiers} if self.policy: - if self.policy == 'ignore': - factory['idConfig']['allowIdentifierGeneration'] = False - factory['idConfig']['ignoreUnknownIdentifier'] = True - elif self.policy == 'generate': - factory['idConfig']['allowIdentifierGeneration'] = True - factory['idConfig']['ignoreUnknownIdentifier'] = False + if self.policy == "ignore": + factory["idConfig"]["allowIdentifierGeneration"] = False + factory["idConfig"]["ignoreUnknownIdentifier"] = True + elif self.policy == "generate": + factory["idConfig"]["allowIdentifierGeneration"] = True + factory["idConfig"]["ignoreUnknownIdentifier"] = False else: - factory['idConfig']['allowIdentifierGeneration'] = False - factory['idConfig']['ignoreUnknownIdentifier'] = False + factory["idConfig"]["allowIdentifierGeneration"] = False + factory["idConfig"]["ignoreUnknownIdentifier"] = False else: - factory['idConfig']['allowIdentifierGeneration'] = False - factory['idConfig']['ignoreUnknownIdentifier'] = False + factory["idConfig"]["allowIdentifierGeneration"] = False + factory["idConfig"]["ignoreUnknownIdentifier"] = False extension_factory.add(factory) @@ -153,11 +204,14 @@ def __create_transient_datasource(self, extension_factory, ): print("**") # send request and parse response as a datasource - mergeStr = 'false' + mergeStr = "false" if self.merge: - mergeStr = 'true' - uri = core.UriBuilder(['project', self.destination, 'transient-datasources']).query('merge', - mergeStr).build() + mergeStr = "true" + uri = ( + core.UriBuilder(["project", self.destination, "transient-datasources"]) + .query("merge", mergeStr) + .build() + ) response = request.post().resource(uri).content(json.dumps(factory)).send() transient = json.loads(response.content) @@ -169,22 +223,29 @@ def __create_transient_datasource(self, extension_factory, ): def compare_datasource(self, transient): # Compare datasources : /datasource//compare/ - uri = core.UriBuilder(['datasource', - transient['name'].encode('ascii', 'ignore'), - 'compare', self.destination]).build() + uri = core.UriBuilder( + [ + "datasource", + transient["name"].encode("ascii", "ignore"), + "compare", + self.destination, + ] + ).build() request = self.client.new_request() request.fail_on_error().accept_json().content_type_json() if self.verbose: request.verbose() response = request.get().resource(uri).send() compare = json.loads(response.content) - for i in 
compare['tableComparisons']: - if i['conflicts']: + for i in compare["tableComparisons"]: + if i["conflicts"]: all_conflicts = [] - for c in i['conflicts']: - all_conflicts.append(c['code'] + "(" + ', '.join(c['arguments']) + ")") + for c in i["conflicts"]: + all_conflicts.append( + c["code"] + "(" + ", ".join(c["arguments"]) + ")" + ) - raise Exception("Import conflicts: " + '; '.join(all_conflicts)) + raise Exception("Import conflicts: " + "; ".join(all_conflicts)) class OpalExporter: @@ -193,17 +254,27 @@ class OpalExporter: """ @classmethod - def build(cls, client: core.OpalClient, datasource: str, tables: list, output: str, incremental: bool = False, multilines: bool = True, identifiers: str = None, - entityIdNames = None, verbose: bool = False): - setattr(cls, 'client', client) - setattr(cls, 'datasource', datasource) - setattr(cls, 'tables', tables) - setattr(cls, 'output', output) - setattr(cls, 'incremental', incremental) - setattr(cls, 'identifiers', identifiers) - setattr(cls, 'multilines', multilines) - setattr(cls, 'entityIdNames', entityIdNames) - setattr(cls, 'verbose', verbose) + def build( + cls, + client: core.OpalClient, + datasource: str, + tables: list, + output: str, + incremental: bool = False, + multilines: bool = True, + identifiers: str = None, + entityIdNames=None, + verbose: bool = False, + ): + cls.client = client + cls.datasource = datasource + cls.tables = tables + cls.output = output + cls.incremental = incremental + cls.identifiers = identifiers + cls.multilines = multilines + cls.entityIdNames = entityIdNames + cls.verbose = verbose return cls() def setClient(self, client): @@ -213,26 +284,27 @@ def setClient(self, client): def submit(self, format) -> core.OpalResponse: # export options options = { - 'format': format, - 'out': self.output, - 'nonIncremental': not self.incremental, - 'multilines': self.multilines, - 'noVariables': False + "format": format, + "out": self.output, + "nonIncremental": not self.incremental, + "multilines": self.multilines, + "noVariables": False, } if self.entityIdNames: - options['entityIdNames'] = self.entityIdNames + options["entityIdNames"] = self.entityIdNames if self.tables: tables2export = self.tables - def table_fullname(t): return self.datasource + '.' + t + def table_fullname(t): + return self.datasource + "." + t - options['tables'] = list(map(table_fullname, tables2export)) + options["tables"] = list(map(table_fullname, tables2export)) if self.identifiers: - options['idConfig'] = { - 'name': self.identifiers, - 'allowIdentifierGeneration': False, - 'ignoreUnknownIdentifier': False + options["idConfig"] = { + "name": self.identifiers, + "allowIdentifierGeneration": False, + "ignoreUnknownIdentifier": False, } if self.verbose: print("** Export options:") @@ -246,16 +318,18 @@ def table_fullname(t): return self.datasource + '.' 
+ t if self.verbose: request.verbose() - uri = core.UriBuilder(['project', self.datasource, 'commands', '_export']).build() + uri = core.UriBuilder( + ["project", self.datasource, "commands", "_export"] + ).build() response = request.post().resource(uri).content(json.dumps(options)).send() # get job status location = None - if 'Location' in response.headers: - location = response.headers['Location'] - elif 'location' in response.headers: - location = response.headers['location'] - job_resource = re.sub(r'http.*\/ws', r'', location) + if "Location" in response.headers: + location = response.headers["Location"] + elif "location" in response.headers: + location = response.headers["location"] + job_resource = re.sub(r"http.*\/ws", r"", location) request = self.client.new_request() request.fail_on_error().accept_json() return request.get().resource(job_resource).send() @@ -267,15 +341,25 @@ class OpalCopier: """ @classmethod - def build(cls, client, datasource, tables, destination, name, incremental=False, nulls=False, verbose=None): - setattr(cls, 'client', client) - setattr(cls, 'datasource', datasource) - setattr(cls, 'tables', tables) - setattr(cls, 'destination', destination) - setattr(cls, 'name', name) - setattr(cls, 'incremental', incremental) - setattr(cls, 'nulls', nulls) - setattr(cls, 'verbose', verbose) + def build( + cls, + client, + datasource, + tables, + destination, + name, + incremental=False, + nulls=False, + verbose=None, + ): + cls.client = client + cls.datasource = datasource + cls.tables = tables + cls.destination = destination + cls.name = name + cls.incremental = incremental + cls.nulls = nulls + cls.verbose = verbose return cls() def setClient(self, client): @@ -285,23 +369,24 @@ def setClient(self, client): def submit(self): # copy options options = { - 'destination': self.destination, - 'nonIncremental': not self.incremental, - 'noVariables': False, - 'noValues': False, - 'destinationTableName': None, - 'copyNullValues': self.nulls, - 'tables': [] + "destination": self.destination, + "nonIncremental": not self.incremental, + "noVariables": False, + "noValues": False, + "destinationTableName": None, + "copyNullValues": self.nulls, + "tables": [], } if self.tables: tables2copy = self.tables - def table_fullname(t): return self.datasource + '.' + t + def table_fullname(t): + return self.datasource + "." + t - options['tables'] = list(map(table_fullname, tables2copy)) + options["tables"] = list(map(table_fullname, tables2copy)) # name option will be ignored if more than one table if self.name: - options['destinationTableName'] = self.name + options["destinationTableName"] = self.name if self.verbose: print("** Copy options:") @@ -315,16 +400,16 @@ def table_fullname(t): return self.datasource + '.' 
+ t if self.verbose: request.verbose() - uri = core.UriBuilder(['project', self.datasource, 'commands', '_copy']).build() + uri = core.UriBuilder(["project", self.datasource, "commands", "_copy"]).build() response = request.post().resource(uri).content(json.dumps(options)).send() # get job status location = None - if 'Location' in response.headers: - location = response.headers['Location'] - elif 'location' in response.headers: - location = response.headers['location'] - job_resource = re.sub(r'http.*\/ws', r'', location) + if "Location" in response.headers: + location = response.headers["Location"] + elif "location" in response.headers: + location = response.headers["location"] + job_resource = re.sub(r"http.*\/ws", r"", location) request = self.client.new_request() request.fail_on_error().accept_json() return request.get().resource(job_resource).send() diff --git a/obiba_opal/perm.py b/obiba_opal/perm.py index d44a77a..cfb6173 100644 --- a/obiba_opal/perm.py +++ b/obiba_opal/perm.py @@ -4,12 +4,13 @@ import obiba_opal.core as core + class PermService: """ Base class for permissions management. """ - SUBJECT_TYPES = ('USER', 'GROUP') + SUBJECT_TYPES = ("USER", "GROUP") def __init__(self, client: core.OpalClient, verbose: bool = False): self.client = client @@ -20,13 +21,43 @@ def _add_permission_arguments(self, parser, permissions: list): """ Add permission arguments """ - parser.add_argument('--fetch', '-f', action='store_true', required=False, help='Fetch permissions') - parser.add_argument('--add', '-a', action='store_true', required=False, help='Add a permission') - parser.add_argument('--delete', '-d', action='store_true', required=False, help='Delete a permission') - parser.add_argument('--permission', '-pe', help="Permission to apply: %s" % ', '.join(permissions)) - parser.add_argument('--subject', '-s', required=False, help='Subject name to which the permission will be granted/removed (required on add/delete)') - parser.add_argument('--type', '-ty', required=True, help='Subject type: user or group') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument( + "--fetch", + "-f", + action="store_true", + required=False, + help="Fetch permissions", + ) + parser.add_argument( + "--add", "-a", action="store_true", required=False, help="Add a permission" + ) + parser.add_argument( + "--delete", + "-d", + action="store_true", + required=False, + help="Delete a permission", + ) + parser.add_argument( + "--permission", + "-pe", + help=f"Permission to apply: {', '.join(permissions)}", + ) + parser.add_argument( + "--subject", + "-s", + required=False, + help="Subject name to which the permission will be granted/removed (required on add/delete)", + ) + parser.add_argument( + "--type", "-ty", required=True, help="Subject type: user or group" + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def _map_permission(self, permission: str, permissions: dict): @@ -47,35 +78,45 @@ def _validate_args(self, args, permissions): if not args.subject: raise ValueError("The subject name is required") if not args.permission: - raise ValueError("A permission name is required: %s" % ', '.join(list(permissions.keys()))) + raise ValueError( + f"A permission name is required: {', '.join(list(permissions.keys()))}" + ) if self._map_permission(args.permission, permissions) is None: - raise ValueError("Valid permissions are: %s" % ', 
'.join(list(permissions.keys()))) + raise ValueError( + f"Valid permissions are: {', '.join(list(permissions.keys()))}" + ) if args.delete: if not args.subject: raise ValueError("The subject name is required") - - if not args.type or args.type.upper() not in self.SUBJECT_TYPES: - raise ValueError("Valid subject types are: %s" % ', '.join(self.SUBJECT_TYPES).lower()) - def _make_add_ws(self, path: list, subject: str, type: str, permission: str, permissions: dict): - return core.UriBuilder(path) \ - .query('type', type.upper()) \ - .query('permission', self._map_permission(permission, permissions)) \ - .query('principal', subject) \ - .build() - - def _make_delete_ws(self, path: list, subject: str, type: str = 'user'): - return core.UriBuilder(path) \ - .query('type', type.upper()) \ - .query('principal', subject) \ + if not args.type or args.type.upper() not in self.SUBJECT_TYPES: + raise ValueError( + f"Valid subject types are: {', '.join(self.SUBJECT_TYPES).lower()}" + ) + + def _make_add_ws( + self, path: list, subject: str, type: str, permission: str, permissions: dict + ): + return ( + core.UriBuilder(path) + .query("type", type.upper()) + .query("permission", self._map_permission(permission, permissions)) + .query("principal", subject) .build() - - def _make_get_ws(self, path: list, type: str = 'user'): - return core.UriBuilder(path) \ - .query('type', type.upper()) \ + ) + + def _make_delete_ws(self, path: list, subject: str, type: str = "user"): + return ( + core.UriBuilder(path) + .query("type", type.upper()) + .query("principal", subject) .build() - + ) + + def _make_get_ws(self, path: list, type: str = "user"): + return core.UriBuilder(path).query("type", type.upper()).build() + def _make_request(self, fail_safe: bool = False): request = self.client.new_request() if not fail_safe: @@ -90,9 +131,7 @@ class ProjectPermService(PermService): Project permissions management. """ - PERMISSIONS = { - 'administrate': 'PROJECT_ALL' - } + PERMISSIONS = {"administrate": "PROJECT_ALL"} def __init__(self, client: core.OpalClient, verbose: bool = False): super().__init__(client, verbose) @@ -103,7 +142,7 @@ def add_arguments(cls, parser): Add command specific options """ cls._add_permission_arguments(parser, list(cls.PERMISSIONS.keys())) - parser.add_argument('--project', '-pr', required=True, help='Project name') + parser.add_argument("--project", "-pr", required=True, help="Project name") @classmethod def do_command(cls, args): @@ -126,7 +165,7 @@ def do_command(cls, args): core.Formatter.print_json(res, args.json) finally: client.close() - + def get_perms(self, project: str, type: str) -> list: """ Get the project permissions. @@ -135,10 +174,15 @@ def get_perms(self, project: str, type: str) -> list: :param type: The subject type ('user' or 'group') """ request = self._make_request() - response = request.get().resource( - self._make_get_ws(['project', project, 'permissions', 'project'], type)).send() + response = ( + request.get() + .resource( + self._make_get_ws(["project", project, "permissions", "project"], type) + ) + .send() + ) return response.from_json() - + def delete_perm(self, project: str, subject: str, type: str): """ Delete project level permissions. 
@@ -149,7 +193,10 @@ def delete_perm(self, project: str, subject: str, type: str): """ request = self._make_request() request.delete().resource( - self._make_delete_ws(['project', project, 'permissions', 'project'], subject, type)).send() + self._make_delete_ws( + ["project", project, "permissions", "project"], subject, type + ) + ).send() def add_perm(self, project: str, subject: str, type: str, permission: str): """ @@ -162,8 +209,15 @@ def add_perm(self, project: str, subject: str, type: str, permission: str): """ request = self._make_request() request.post().resource( - self._make_add_ws(['project', project, 'permissions', 'project'], subject, type, permission, self.PERMISSIONS)).send() - + self._make_add_ws( + ["project", project, "permissions", "project"], + subject, + type, + permission, + self.PERMISSIONS, + ) + ).send() + class DatasourcePermService(PermService): """ @@ -171,11 +225,11 @@ class DatasourcePermService(PermService): """ PERMISSIONS = { - 'view-value': 'DATASOURCE_VIEW', - 'add-table': 'TABLE_ADD', - 'administrate': 'DATASOURCE_ALL' + "view-value": "DATASOURCE_VIEW", + "add-table": "TABLE_ADD", + "administrate": "DATASOURCE_ALL", } - + def __init__(self, client: core.OpalClient, verbose: bool = False): super().__init__(client, verbose) @@ -185,7 +239,12 @@ def add_arguments(cls, parser): Add command specific options """ cls._add_permission_arguments(parser, list(cls.PERMISSIONS.keys())) - parser.add_argument('--project', '-pr', required=True, help='Project name to which the tables belong') + parser.add_argument( + "--project", + "-pr", + required=True, + help="Project name to which the tables belong", + ) @classmethod def do_command(cls, args): @@ -217,10 +276,17 @@ def get_perms(self, project: str, type: str) -> list: :param type: The subject type ('user' or 'group') """ request = self._make_request() - response = request.get().resource( - self._make_get_ws(['project', project, 'permissions', 'datasource'], type)).send() + response = ( + request.get() + .resource( + self._make_get_ws( + ["project", project, "permissions", "datasource"], type + ) + ) + .send() + ) return response.from_json() - + def delete_perm(self, project: str, subject: str, type: str): """ Delete project's datasource level permissions. 
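The permission services in this file all follow the same shape: get_perms() lists the grants for a subject type, add_perm()/delete_perm() grant or revoke a named permission for a user or group, and the PERMISSIONS dict maps the CLI-friendly name to the Opal permission constant. A minimal usage sketch for the project-level service follows (the same pattern applies to the datasource, table, variable and resource variants); `client` stands for an already authenticated core.OpalClient, and the project and subject names are placeholders, not values taken from this patch:

    from obiba_opal.perm import ProjectPermService

    # client: an authenticated core.OpalClient obtained elsewhere (assumed)
    perms = ProjectPermService(client, verbose=False)
    # grant project administration to a user, list the grants, then revoke it
    perms.add_perm("CNSIM", subject="jdoe", type="user", permission="administrate")
    print(perms.get_perms("CNSIM", "user"))
    perms.delete_perm("CNSIM", subject="jdoe", type="user")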
@@ -231,7 +297,10 @@ def delete_perm(self, project: str, subject: str, type: str): """ request = self._make_request() request.delete().resource( - self._make_delete_ws(['project', project, 'permissions', 'datasource'], subject, type)).send() + self._make_delete_ws( + ["project", project, "permissions", "datasource"], subject, type + ) + ).send() def add_perm(self, project: str, subject: str, type: str, permission: str): """ @@ -244,7 +313,14 @@ def add_perm(self, project: str, subject: str, type: str, permission: str): """ request = self._make_request() request.post().resource( - self._make_add_ws(['project', project, 'permissions', 'datasource'], subject, type, permission, self.PERMISSIONS)).send() + self._make_add_ws( + ["project", project, "permissions", "datasource"], + subject, + type, + permission, + self.PERMISSIONS, + ) + ).send() class TablePermService(PermService): @@ -253,11 +329,11 @@ class TablePermService(PermService): """ PERMISSIONS = { - 'view': 'TABLE_READ', - 'view-value': 'TABLE_VALUES', - 'edit': 'TABLE_EDIT', - 'edit-values': 'TABLE_VALUES_EDIT', - 'administrate': 'TABLE_ALL' + "view": "TABLE_READ", + "view-value": "TABLE_VALUES", + "edit": "TABLE_EDIT", + "edit-values": "TABLE_VALUES_EDIT", + "administrate": "TABLE_ALL", } def __init__(self, client: core.OpalClient, verbose: bool = False): @@ -269,11 +345,21 @@ def add_arguments(cls, parser): Add command specific options """ cls._add_permission_arguments(parser, list(cls.PERMISSIONS.keys())) - parser.add_argument('--project', '-pr', required=True, help='Project name to which the tables belong') - parser.add_argument('--tables', '-t', nargs='+', required=False, - help='List of table names on which the permission is to be get/set (default is all)') + parser.add_argument( + "--project", + "-pr", + required=True, + help="Project name to which the tables belong", + ) + parser.add_argument( + "--tables", + "-t", + nargs="+", + required=False, + help="List of table names on which the permission is to be get/set (default is all)", + ) - @classmethod + @classmethod def do_command(cls, args): """ Execute permission command @@ -289,7 +375,9 @@ def do_command(cls, args): if args.delete: service.delete_perms(args.project, args.tables, args.subject, args.type) elif args.add: - service.add_perms(args.project, args.tables, args.subject, args.type, args.permission) + service.add_perms( + args.project, args.tables, args.subject, args.type, args.permission + ) else: res = [] for table in service._ensure_tables(args.project, args.tables): @@ -301,13 +389,20 @@ def do_command(cls, args): def get_perms(self, project: str, table: str, type: str) -> list: """ Get the table permissions. 
- + :param project: The project name :param type: The subject type ('user' or 'group') """ request = self._make_request() - response = request.get().resource( - self._make_get_ws(['project', project, 'permissions', 'table', table], type)).send() + response = ( + request.get() + .resource( + self._make_get_ws( + ["project", project, "permissions", "table", table], type + ) + ) + .send() + ) return response.from_json() def delete_perms(self, project: str, tables: list, subject: str, type: str): @@ -334,9 +429,14 @@ def delete_perm(self, project: str, table: str, subject: str, type: str): """ request = self._make_request() request.delete().resource( - self._make_delete_ws(['project', project, 'permissions', 'table', table], subject, type)).send() + self._make_delete_ws( + ["project", project, "permissions", "table", table], subject, type + ) + ).send() - def add_perms(self, project: str, tables: list, subject: str, type: str, permission: str): + def add_perms( + self, project: str, tables: list, subject: str, type: str, permission: str + ): """ Add project's tables level permissions. @@ -350,7 +450,9 @@ def add_perms(self, project: str, tables: list, subject: str, type: str, permiss for table in tables_: self.add_perm(project, table, subject, type, permission) - def add_perm(self, project: str, table: str, subject: str, type: str, permission: str): + def add_perm( + self, project: str, table: str, subject: str, type: str, permission: str + ): """ Add project's table level permissions. @@ -362,16 +464,28 @@ def add_perm(self, project: str, table: str, subject: str, type: str, permission """ request = self._make_request() request.post().resource( - self._make_add_ws(['project', project, 'permissions', 'table', table], subject, type, permission, self.PERMISSIONS)).send() - + self._make_add_ws( + ["project", project, "permissions", "table", table], + subject, + type, + permission, + self.PERMISSIONS, + ) + ).send() + def _ensure_tables(self, project: str, tables: list) -> list: """ Get the table names of the project's datasource if none is specified. """ if not tables: request = self._make_request() - res = request.get().resource(core.UriBuilder(['datasource', project, 'tables']).build()).send().from_json() - return [x['name'] for x in res] + res = ( + request.get() + .resource(core.UriBuilder(["datasource", project, "tables"]).build()) + .send() + .from_json() + ) + return [x["name"] for x in res] else: return tables @@ -381,9 +495,7 @@ class VariablePermService(PermService): Project table variables permissions management. 
""" - PERMISSIONS = { - 'view': 'VARIABLE_READ' - } + PERMISSIONS = {"view": "VARIABLE_READ"} def __init__(self, client: core.OpalClient, verbose: bool = False): super().__init__(client, verbose) @@ -394,12 +506,27 @@ def add_arguments(cls, parser): Add command specific options """ cls._add_permission_arguments(parser, list(cls.PERMISSIONS.keys())) - parser.add_argument('--project', '-pr', required=True, help='Project name to which the tables belong') - parser.add_argument('--table', '-t', required=True, help='Table name to which the variables belong') - parser.add_argument('--variables', '-va', nargs='+', required=False, - help='List of variable names on which the permission is to be get/set (default is all)') + parser.add_argument( + "--project", + "-pr", + required=True, + help="Project name to which the tables belong", + ) + parser.add_argument( + "--table", + "-t", + required=True, + help="Table name to which the variables belong", + ) + parser.add_argument( + "--variables", + "-va", + nargs="+", + required=False, + help="List of variable names on which the permission is to be get/set (default is all)", + ) - @classmethod + @classmethod def do_command(cls, args): """ Execute permission command @@ -413,13 +540,26 @@ def do_command(cls, args): # send request if args.delete: - service.delete_perms(args.project, args.table, args.variables, args.subject, args.type) + service.delete_perms( + args.project, args.table, args.variables, args.subject, args.type + ) elif args.add: - service.add_perms(args.project, args.table, args.variables, args.subject, args.type, args.permission) + service.add_perms( + args.project, + args.table, + args.variables, + args.subject, + args.type, + args.permission, + ) else: res = [] - for variable in service._ensure_variables(args.project, args.table, args.variables): - res = res + service.get_perms(args.project, args.table, variable, args.type) + for variable in service._ensure_variables( + args.project, args.table, args.variables + ): + res = res + service.get_perms( + args.project, args.table, variable, args.type + ) core.Formatter.print_json(res, args.json) finally: client.close() @@ -434,11 +574,29 @@ def get_perms(self, project: str, table: str, variable: str, type: str) -> list: :param type: The subject type ('user' or 'group') """ request = self._make_request() - response = request.get().resource( - self._make_get_ws(['project', project, 'permissions', 'table', table, 'variable', variable], type)).send() + response = ( + request.get() + .resource( + self._make_get_ws( + [ + "project", + project, + "permissions", + "table", + table, + "variable", + variable, + ], + type, + ) + ) + .send() + ) return response.from_json() - - def delete_perms(self, project: str, table: str, variables: list, subject: str, type: str): + + def delete_perms( + self, project: str, table: str, variables: list, subject: str, type: str + ): """ Delete project's table variables level permissions. @@ -452,7 +610,9 @@ def delete_perms(self, project: str, table: str, variables: list, subject: str, for variable in variables_: self.delete_perm(project, table, variable, subject, type) - def delete_perm(self, project: str, table: str, variable: str, subject: str, type: str): + def delete_perm( + self, project: str, table: str, variable: str, subject: str, type: str + ): """ Delete project's table variable level permissions. 
@@ -464,9 +624,30 @@ def delete_perm(self, project: str, table: str, variable: str, subject: str, typ """ request = self._make_request() request.delete().resource( - self._make_delete_ws(['project', project, 'permissions', 'table', table, 'variable', variable], subject, type)).send() - - def add_perms(self, project: str, table: str, variables: list, subject: str, type: str, permission: str): + self._make_delete_ws( + [ + "project", + project, + "permissions", + "table", + table, + "variable", + variable, + ], + subject, + type, + ) + ).send() + + def add_perms( + self, + project: str, + table: str, + variables: list, + subject: str, + type: str, + permission: str, + ): """ Add project's table variables level permissions. @@ -481,7 +662,15 @@ def add_perms(self, project: str, table: str, variables: list, subject: str, typ for variable in variables_: self.add_perm(project, table, variable, subject, type, permission) - def add_perm(self, project: str, table: str, variable: str, subject: str, type: str, permission: str): + def add_perm( + self, + project: str, + table: str, + variable: str, + subject: str, + type: str, + permission: str, + ): """ Add project's table variable level permissions. @@ -494,16 +683,40 @@ def add_perm(self, project: str, table: str, variable: str, subject: str, type: """ request = self._make_request() request.post().resource( - self._make_add_ws(['project', project, 'permissions', 'table', table, 'variable', variable], subject, type, permission, self.PERMISSIONS)).send() - + self._make_add_ws( + [ + "project", + project, + "permissions", + "table", + table, + "variable", + variable, + ], + subject, + type, + permission, + self.PERMISSIONS, + ) + ).send() + def _ensure_variables(self, project: str, table: str, variables: list) -> list: """ Get the table's variable names of the project's datasource if none is specified. """ if not variables: request = self._make_request() - res = request.get().resource(core.UriBuilder(['datasource', project, 'table', table, 'variables']).build()).send().from_json() - return [x['name'] for x in res] + res = ( + request.get() + .resource( + core.UriBuilder( + ["datasource", project, "table", table, "variables"] + ).build() + ) + .send() + .from_json() + ) + return [x["name"] for x in res] else: return variables @@ -513,10 +726,7 @@ class ResourcePermService(PermService): Project resource permissions management. 
""" - PERMISSIONS = { - 'view': 'RESOURCE_VIEW', - 'administrate': 'RESOURCE_ALL' - } + PERMISSIONS = {"view": "RESOURCE_VIEW", "administrate": "RESOURCE_ALL"} def __init__(self, client: core.OpalClient, verbose: bool = False): super().__init__(client, verbose) @@ -527,11 +737,21 @@ def add_arguments(cls, parser): Add command specific options """ cls._add_permission_arguments(parser, list(cls.PERMISSIONS.keys())) - parser.add_argument('--project', '-pr', required=True, help='Project name to which the resources belong') - parser.add_argument('--resources', '-r', nargs='+', required=False, - help='List of resource names on which the permission is to be get/set (default is all)') - - @classmethod + parser.add_argument( + "--project", + "-pr", + required=True, + help="Project name to which the resources belong", + ) + parser.add_argument( + "--resources", + "-r", + nargs="+", + required=False, + help="List of resource names on which the permission is to be get/set (default is all)", + ) + + @classmethod def do_command(cls, args): """ Execute permission command @@ -545,9 +765,17 @@ def do_command(cls, args): # send request if args.delete: - service.delete_perms(args.project, args.resources, args.subject, args.type) + service.delete_perms( + args.project, args.resources, args.subject, args.type + ) elif args.add: - service.add_perms(args.project, args.resources, args.subject, args.type, args.permission) + service.add_perms( + args.project, + args.resources, + args.subject, + args.type, + args.permission, + ) else: res = [] for resource in service._ensure_resources(args.project, args.resources): @@ -564,10 +792,17 @@ def get_perms(self, project: str, resource: str, type: str) -> list: :param type: The subject type ('user' or 'group') """ request = self._make_request() - response = request.get().resource( - self._make_get_ws(['project', project, 'permissions', 'resource', resource], type)).send() + response = ( + request.get() + .resource( + self._make_get_ws( + ["project", project, "permissions", "resource", resource], type + ) + ) + .send() + ) return response.from_json() - + def delete_perms(self, project: str, resources: list, subject: str, type: str): """ Delete project's resources level permissions. @@ -592,9 +827,14 @@ def delete_perm(self, project: str, resource: str, subject: str, type: str): """ request = self._make_request() request.delete().resource( - self._make_delete_ws(['project', project, 'permissions', 'resource', resource], subject, type)).send() + self._make_delete_ws( + ["project", project, "permissions", "resource", resource], subject, type + ) + ).send() - def add_perms(self, project: str, resources: list, subject: str, type: str, permission: str): + def add_perms( + self, project: str, resources: list, subject: str, type: str, permission: str + ): """ Add project's resources level permissions. @@ -608,7 +848,9 @@ def add_perms(self, project: str, resources: list, subject: str, type: str, perm for resource in resources_: self.add_perm(project, resource, subject, type, permission) - def add_perm(self, project: str, resource: str, subject: str, type: str, permission: str): + def add_perm( + self, project: str, resource: str, subject: str, type: str, permission: str + ): """ Add project's resource level permissions. 
@@ -620,16 +862,28 @@ def add_perm(self, project: str, resource: str, subject: str, type: str, permiss """ request = self._make_request() request.post().resource( - self._make_add_ws(['project', project, 'permissions', 'resource', resource], subject, type, permission, self.PERMISSIONS)).send() - + self._make_add_ws( + ["project", project, "permissions", "resource", resource], + subject, + type, + permission, + self.PERMISSIONS, + ) + ).send() + def _ensure_resources(self, project: str, resources: list) -> list: """ Get the resource names of the project if none is specified. """ if not resources: request = self._make_request() - res = request.get().resource(core.UriBuilder(['project', project, 'resources']).build()).send().from_json() - return [x['name'] for x in res] + res = ( + request.get() + .resource(core.UriBuilder(["project", project, "resources"]).build()) + .send() + .from_json() + ) + return [x["name"] for x in res] else: return resources @@ -639,10 +893,7 @@ class ResourcesPermService(PermService): Project resources permissions management. """ - PERMISSIONS = { - 'view': 'RESOURCES_VIEW', - 'administrate': 'RESOURCES_ALL' - } + PERMISSIONS = {"view": "RESOURCES_VIEW", "administrate": "RESOURCES_ALL"} def __init__(self, client: core.OpalClient, verbose: bool = False): super().__init__(client, verbose) @@ -653,7 +904,12 @@ def add_arguments(cls, parser): Add command specific options """ cls._add_permission_arguments(parser, list(cls.PERMISSIONS.keys())) - parser.add_argument('--project', '-pr', required=True, help='Project name to which the resources belong') + parser.add_argument( + "--project", + "-pr", + required=True, + help="Project name to which the resources belong", + ) @classmethod def do_command(cls, args): @@ -685,10 +941,17 @@ def get_perms(self, project: str, type: str) -> list: :param type: The subject type ('user' or 'group') """ request = self._make_request() - response = request.get().resource( - self._make_get_ws(['project', project, 'permissions', 'resources'], type)).send() + response = ( + request.get() + .resource( + self._make_get_ws( + ["project", project, "permissions", "resources"], type + ) + ) + .send() + ) return response.from_json() - + def delete_perm(self, project: str, subject: str, type: str): """ Delete project resources level permissions. @@ -699,7 +962,10 @@ def delete_perm(self, project: str, subject: str, type: str): """ request = self._make_request() request.delete().resource( - self._make_delete_ws(['project', project, 'permissions', 'resources'], subject, type)).send() + self._make_delete_ws( + ["project", project, "permissions", "resources"], subject, type + ) + ).send() def add_perm(self, project: str, subject: str, type: str, permission: str): """ @@ -712,7 +978,14 @@ def add_perm(self, project: str, subject: str, type: str, permission: str): """ request = self._make_request() request.post().resource( - self._make_add_ws(['project', project, 'permissions', 'resources'], subject, type, permission, self.PERMISSIONS)).send() + self._make_add_ws( + ["project", project, "permissions", "resources"], + subject, + type, + permission, + self.PERMISSIONS, + ) + ).send() class RPermService(PermService): @@ -720,9 +993,7 @@ class RPermService(PermService): R service permissions management. 
""" - PERMISSIONS = { - 'use': 'R_USE' - } + PERMISSIONS = {"use": "R_USE"} def __init__(self, client: core.OpalClient, verbose: bool = False): super().__init__(client, verbose) @@ -763,10 +1034,13 @@ def get_perms(self, type: str) -> list: :param type: The subject type ('user' or 'group') """ request = self._make_request() - response = request.get().resource( - self._make_get_ws(['system', 'permissions', 'r'], type)).send() + response = ( + request.get() + .resource(self._make_get_ws(["system", "permissions", "r"], type)) + .send() + ) return response.from_json() - + def delete_perm(self, subject: str, type: str): """ Delete R level permissions. @@ -776,7 +1050,8 @@ def delete_perm(self, subject: str, type: str): """ request = self._make_request() request.delete().resource( - self._make_delete_ws(['system', 'permissions', 'r'], subject, type)).send() + self._make_delete_ws(["system", "permissions", "r"], subject, type) + ).send() def add_perm(self, subject: str, type: str, permission: str): """ @@ -788,18 +1063,22 @@ def add_perm(self, subject: str, type: str, permission: str): """ request = self._make_request() request.post().resource( - self._make_add_ws(['system', 'permissions', 'r'], subject, type, permission, self.PERMISSIONS)).send() - + self._make_add_ws( + ["system", "permissions", "r"], + subject, + type, + permission, + self.PERMISSIONS, + ) + ).send() + class DataSHIELDPermService(PermService): """ DataSHIELD service permissions management. """ - PERMISSIONS = { - 'use': 'DATASHIELD_USE', - 'administrate': 'DATASHIELD_ALL' - } + PERMISSIONS = {"use": "DATASHIELD_USE", "administrate": "DATASHIELD_ALL"} def __init__(self, client: core.OpalClient, verbose: bool = False): super().__init__(client, verbose) @@ -840,10 +1119,13 @@ def get_perms(self, type: str) -> list: :param type: The subject type ('user' or 'group') """ request = self._make_request() - response = request.get().resource( - self._make_get_ws(['system', 'permissions', 'datashield'], type)).send() + response = ( + request.get() + .resource(self._make_get_ws(["system", "permissions", "datashield"], type)) + .send() + ) return response.from_json() - + def delete_perm(self, subject: str, type: str): """ Delete DataSHIELD level permissions. @@ -853,7 +1135,8 @@ def delete_perm(self, subject: str, type: str): """ request = self._make_request() request.delete().resource( - self._make_delete_ws(['system', 'permissions', 'datashield'], subject, type)).send() + self._make_delete_ws(["system", "permissions", "datashield"], subject, type) + ).send() def add_perm(self, subject: str, type: str, permission: str): """ @@ -865,18 +1148,22 @@ def add_perm(self, subject: str, type: str, permission: str): """ request = self._make_request() request.post().resource( - self._make_add_ws(['system', 'permissions', 'datashield'], subject, type, permission, self.PERMISSIONS)).send() - + self._make_add_ws( + ["system", "permissions", "datashield"], + subject, + type, + permission, + self.PERMISSIONS, + ) + ).send() + class SystemPermService(PermService): """ System administration permissions management. 
""" - PERMISSIONS = { - 'add-project': 'PROJECT_ADD', - 'administrate': 'SYSTEM_ALL' - } + PERMISSIONS = {"add-project": "PROJECT_ADD", "administrate": "SYSTEM_ALL"} def __init__(self, client: core.OpalClient, verbose: bool = False): super().__init__(client, verbose) @@ -909,7 +1196,7 @@ def do_command(cls, args): core.Formatter.print_json(res, args.json) finally: client.close() - + def get_perms(self, type: str) -> list: """ Get the system administration permissions. @@ -917,10 +1204,14 @@ def get_perms(self, type: str) -> list: :param type: The subject type ('user' or 'group') """ request = self._make_request() - response = request.get().resource( - self._make_get_ws(['system', 'permissions', 'administration'], type)).send() + response = ( + request.get() + .resource( + self._make_get_ws(["system", "permissions", "administration"], type) + ) + .send() + ) return response.from_json() - def delete_perm(self, subject: str, type: str): """ @@ -931,7 +1222,10 @@ def delete_perm(self, subject: str, type: str): """ request = self._make_request() request.delete().resource( - self._make_delete_ws(['system', 'permissions', 'administration'], subject, type)).send() + self._make_delete_ws( + ["system", "permissions", "administration"], subject, type + ) + ).send() def add_perm(self, subject: str, type: str, permission: str): """ @@ -943,4 +1237,11 @@ def add_perm(self, subject: str, type: str, permission: str): """ request = self._make_request() request.post().resource( - self._make_add_ws(['system', 'permissions', 'administration'], subject, type, permission, self.PERMISSIONS)).send() + self._make_add_ws( + ["system", "permissions", "administration"], + subject, + type, + permission, + self.PERMISSIONS, + ) + ).send() diff --git a/obiba_opal/project.py b/obiba_opal/project.py index 11b6a0a..bf26058 100755 --- a/obiba_opal/project.py +++ b/obiba_opal/project.py @@ -7,6 +7,7 @@ import sys import re + class ProjectService: """ Project management. @@ -21,20 +22,61 @@ def add_arguments(cls, parser): """ Add command specific options """ - parser.add_argument('--name', '-n', required=False, - help='Project name. Not specifying the project name, will get the list of the projects.') - parser.add_argument('--database', '-db', required=False, - help='Project database name. If not provided only views can be added.') - parser.add_argument('--title', '-t', required=False, help='Project title.') - parser.add_argument('--description', '-dc', required=False, help='Project description.') - parser.add_argument('--tags', '-tg', nargs='+', required=False, help='Tags to apply to the project.') - parser.add_argument('--export-folder', '-ex', required=False, help='Project preferred export folder.') - - parser.add_argument('--add', '-a', action='store_true', help='Add a project (requires at least a project name).') - parser.add_argument('--delete', '-de', action='store_true', required=False, - help='Delete a project (requires at least a project name).') - parser.add_argument('--force', '-f', action='store_true', help='Skip confirmation on project deletion') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument( + "--name", + "-n", + required=False, + help="Project name. Not specifying the project name, will get the list of the projects.", + ) + parser.add_argument( + "--database", + "-db", + required=False, + help="Project database name. 
If not provided only views can be added.", + ) + parser.add_argument("--title", "-t", required=False, help="Project title.") + parser.add_argument( + "--description", "-dc", required=False, help="Project description." + ) + parser.add_argument( + "--tags", + "-tg", + nargs="+", + required=False, + help="Tags to apply to the project.", + ) + parser.add_argument( + "--export-folder", + "-ex", + required=False, + help="Project preferred export folder.", + ) + + parser.add_argument( + "--add", + "-a", + action="store_true", + help="Add a project (requires at least a project name).", + ) + parser.add_argument( + "--delete", + "-de", + action="store_true", + required=False, + help="Delete a project (requires at least a project name).", + ) + parser.add_argument( + "--force", + "-f", + action="store_true", + help="Skip confirmation on project deletion", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -47,19 +89,26 @@ def do_command(cls, args): service = ProjectService(client, args.verbose) if args.add: - service.add_project(args.name, args.database, args.title, args.description, args.tags, args.export_folder) + service.add_project( + args.name, + args.database, + args.title, + args.description, + args.tags, + args.export_folder, + ) elif args.delete: if not args.name: - raise Exception('A project name is required.') + raise Exception("A project name is required.") # confirm if args.force: service.delete_project(args.name) else: confirmed = input('Delete the project "' + args.name + '"? [y/N]: ') - if confirmed == 'y': + if confirmed == "y": service.delete_project(args.name) else: - print('Aborted.') + print("Aborted.") sys.exit(0) elif not args.name: res = service.get_projects() @@ -73,7 +122,7 @@ def get_projects(self) -> list: Get the projects. """ request = self._make_request() - response = request.get().resource(core.UriBuilder(['projects']).build()).send() + response = request.get().resource(core.UriBuilder(["projects"]).build()).send() return response.from_json() def get_project(self, name: str, fail_safe: bool = True) -> dict: @@ -83,9 +132,11 @@ def get_project(self, name: str, fail_safe: bool = True) -> dict: :param name: The project name """ if not name: - raise ValueError('The project name is required.') + raise ValueError("The project name is required.") request = self._make_request(fail_safe) - response = request.get().resource(core.UriBuilder(['project', name]).build()).send() + response = ( + request.get().resource(core.UriBuilder(["project", name]).build()).send() + ) return response.from_json() if response.code == 200 else None def delete_project(self, name: str): @@ -95,11 +146,19 @@ def delete_project(self, name: str): :param name: The project name """ if not name: - raise ValueError('The project name is required.') + raise ValueError("The project name is required.") request = self._make_request() - request.delete().resource(core.UriBuilder(['project', name]).build()).send() - - def add_project(self, name: str, database: str = None, title: str = None, description: str = None, tags: list = None, export_folder: str = None): + request.delete().resource(core.UriBuilder(["project", name]).build()).send() + + def add_project( + self, + name: str, + database: str = None, + title: str = None, + description: str = None, + tags: list = None, + export_folder: str = None, + ): """ Add a project. 
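Besides the CLI wiring, ProjectService can be driven directly: get_projects()/get_project() list or look up projects, add_project() creates one (the title falls back to the project name when omitted), and delete_project() removes one. A minimal sketch, assuming `client` is an authenticated core.OpalClient; the project name, database and tags are placeholders:

    from obiba_opal.project import ProjectService

    projects = ProjectService(client, verbose=False)
    if projects.get_project("CLSA") is None:  # fail_safe=True returns None when missing
        projects.add_project("CLSA", database="opal_data", tags=["cohort"])
    print([p["name"] for p in projects.get_projects()])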
@@ -111,28 +170,30 @@ def add_project(self, name: str, database: str = None, title: str = None, descri :param export_folder: The project's preferred export folder """ if not name: - raise ValueError('The project name is required.') + raise ValueError("The project name is required.") # create project - project = {'name': name} + project = {"name": name} if database: - project['database'] = database + project["database"] = database if title: - project['title'] = title + project["title"] = title else: - project['title'] = name + project["title"] = name if description: - project['description'] = description + project["description"] = description if tags: - project['tags'] = tags + project["tags"] = tags if export_folder: - project['exportFolder'] = export_folder + project["exportFolder"] = export_folder request = self._make_request() request.accept_json().content_type_json() - request.post().resource(core.UriBuilder(['projects']).build()).content(json.dumps(project)).send() + request.post().resource(core.UriBuilder(["projects"]).build()).content( + json.dumps(project) + ).send() - def get_databases(self, usage: str = 'storage') -> list: + def get_databases(self, usage: str = "storage") -> list: """ Get the databases available. @@ -140,7 +201,13 @@ def get_databases(self, usage: str = 'storage') -> list: """ request = self._make_request() request.accept_json() - response = request.get().resource(core.UriBuilder(['system', 'databases']).query('usage', usage).build()).send() + response = ( + request.get() + .resource( + core.UriBuilder(["system", "databases"]).query("usage", usage).build() + ) + .send() + ) return response.from_json() def _make_request(self, fail_safe: bool = False): @@ -166,12 +233,33 @@ def add_arguments(self, parser): """ Add command specific options """ - parser.add_argument('--project', '-pr', required=True, help='Source project name') - parser.add_argument('--archive', '-ar', required=True, help='Archive directory path in the Opal file system') - parser.add_argument('--views-as-tables', '-vt', action='store_true', - help='Treat views as tables, i.e. export data instead of keeping derivation scripts (default is false)') - parser.add_argument('--force', '-f', action='store_true', help='Force overwriting an existing backup folder') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument( + "--project", "-pr", required=True, help="Source project name" + ) + parser.add_argument( + "--archive", + "-ar", + required=True, + help="Archive directory path in the Opal file system", + ) + parser.add_argument( + "--views-as-tables", + "-vt", + action="store_true", + help="Treat views as tables, i.e. 
export data instead of keeping derivation scripts (default is false)", + ) + parser.add_argument( + "--force", + "-f", + action="store_true", + help="Force overwriting an existing backup folder", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(self, args): @@ -180,13 +268,21 @@ def do_command(self, args): """ client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - res = BackupProjectCommand(client, args.verbose).backup_project(args.project, args.archive, args.views_as_tables, args.force) + res = BackupProjectCommand(client, args.verbose).backup_project( + args.project, args.archive, args.views_as_tables, args.force + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def backup_project(self, project: str, archive: str, views_as_tables: bool = False, force: bool = False) -> dict: + def backup_project( + self, + project: str, + archive: str, + views_as_tables: bool = False, + force: bool = False, + ) -> dict: """ Prepare the backup parameters and launch the backup task on the project @@ -197,13 +293,13 @@ def backup_project(self, project: str, archive: str, views_as_tables: bool = Fal """ # Build and send request # backup options - options = {'archive': archive} + options = {"archive": archive} if views_as_tables: - options['viewsAsTables'] = views_as_tables + options["viewsAsTables"] = views_as_tables if force: - options['override'] = force + options["override"] = force - uri = core.UriBuilder(['project', project, 'commands', '_backup']).build() + uri = core.UriBuilder(["project", project, "commands", "_backup"]).build() request = self.client.new_request() request.fail_on_error().accept_json().content_type_json() if self.verbose: @@ -212,7 +308,7 @@ def backup_project(self, project: str, archive: str, views_as_tables: bool = Fal # get job status location = response.get_location() - job_resource = re.sub(r'http.*\/ws', r'', location) + job_resource = re.sub(r"http.*\/ws", r"", location) request = self.client.new_request() request.fail_on_error().accept_json() if self.verbose: @@ -235,13 +331,36 @@ def add_arguments(cls, parser): """ Add command specific options """ - parser.add_argument('--project', '-pr', required=True, help='Destination project name (must exist)') - parser.add_argument('--archive', '-ar', required=True, - help='Archive directory or zip file path in the Opal file system') - parser.add_argument('--arpassword', '-arp', required=False, help='Password to decrypt zip archive (optional)') - parser.add_argument('--force', '-f', action='store_true', - help='Force overwriting existing items (table, view, resource, report). Files override is not checked') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument( + "--project", + "-pr", + required=True, + help="Destination project name (must exist)", + ) + parser.add_argument( + "--archive", + "-ar", + required=True, + help="Archive directory or zip file path in the Opal file system", + ) + parser.add_argument( + "--arpassword", + "-arp", + required=False, + help="Password to decrypt zip archive (optional)", + ) + parser.add_argument( + "--force", + "-f", + action="store_true", + help="Force overwriting existing items (table, view, resource, report). 
Files override is not checked", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -251,14 +370,14 @@ def do_command(cls, args): # Build and send request # restore options - options = {'archive': args.archive} + options = {"archive": args.archive} if args.force: - options['override'] = args.force + options["override"] = args.force if args.arpassword: - options['password'] = args.arpassword + options["password"] = args.arpassword client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) - uri = core.UriBuilder(['project', args.project, 'commands', '_restore']).build() + uri = core.UriBuilder(["project", args.project, "commands", "_restore"]).build() request = client.new_request() request.fail_on_error().accept_json().content_type_json() if args.verbose: @@ -267,18 +386,18 @@ def do_command(cls, args): # get job status location = None - if 'Location' in response.headers: - location = response.headers['Location'] - elif 'location' in response.headers: - location = response.headers['location'] - job_resource = re.sub(r'http.*\/ws', r'', location) + if "Location" in response.headers: + location = response.headers["Location"] + elif "location" in response.headers: + location = response.headers["location"] + job_resource = re.sub(r"http.*\/ws", r"", location) request = client.new_request() request.fail_on_error().accept_json() if args.verbose: request.verbose() response = request.get().resource(job_resource).send() # format response - res = response.content.decode('utf-8') + res = response.content.decode("utf-8") if args.json: res = response.pretty_json() diff --git a/obiba_opal/security.py b/obiba_opal/security.py index a34cd94..20eb9c8 100644 --- a/obiba_opal/security.py +++ b/obiba_opal/security.py @@ -1,5 +1,6 @@ import obiba_opal.core as core + class EncryptService: """ Encryption by Opal. @@ -7,7 +8,7 @@ class EncryptService: @classmethod def add_arguments(cls, parser): - parser.add_argument('plain', help='Plain text to encrypt') + parser.add_argument("plain", help="Plain text to encrypt") @classmethod def do_command(cls, args): @@ -19,11 +20,14 @@ def do_command(cls, args): if args.verbose: request.verbose() - response = request.get().resource("/system/crypto/encrypt/" + args.plain).send() + response = ( + request.get().resource("/system/crypto/encrypt/" + args.plain).send() + ) print(response.content) finally: client.close() + class DecryptService: """ Decryption by Opal. @@ -31,7 +35,7 @@ class DecryptService: @classmethod def add_arguments(cls, parser): - parser.add_argument('encrypted', help='Encrypted text to decrypt') + parser.add_argument("encrypted", help="Encrypted text to decrypt") @classmethod def do_command(cls, args): @@ -43,7 +47,11 @@ def do_command(cls, args): if args.verbose: request.verbose() - response = request.get().resource("/system/crypto/decrypt/" + args.encrypted).send() + response = ( + request.get() + .resource("/system/crypto/decrypt/" + args.encrypted) + .send() + ) print(response.content) finally: client.close() diff --git a/obiba_opal/sql.py b/obiba_opal/sql.py index b3faf27..d72a9f9 100644 --- a/obiba_opal/sql.py +++ b/obiba_opal/sql.py @@ -5,6 +5,7 @@ import obiba_opal.core as core import urllib.parse + class SQLService: """ Execute SQL queries. 
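EncryptService and DecryptService above are thin wrappers over two GET endpoints, so the round trip can also be scripted with the same request API. A minimal sketch, assuming `client` is an authenticated core.OpalClient and "s3cret" is a placeholder value:

    # encrypt a value, then decrypt it again (mirrors the two services above)
    request = client.new_request()
    request.fail_on_error()
    response = request.get().resource("/system/crypto/encrypt/" + "s3cret").send()
    encrypted = response.content.decode("utf-8")

    request = client.new_request()
    request.fail_on_error()
    response = request.get().resource("/system/crypto/decrypt/" + encrypted).send()
    print(response.content.decode("utf-8"))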
@@ -15,11 +16,31 @@ def add_arguments(cls, parser): """ Add command specific options """ - parser.add_argument('--project', '-pr', required=False, help='Source project name, that will be used to resolve the table names in the FROM statement. If not provided, the fully qualified table names must be specified in the query (escaped by backquotes: `.
`).') - parser.add_argument('--query', '-q', required=True, help='SQL query') - parser.add_argument('--format', '-f', required=False, help='The format of the output, can be "json" or "csv". Default is "csv".') - parser.add_argument('--id-name', '-in', required=False, help='Name of the ID column name. Default is "_id".') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument( + "--project", + "-pr", + required=False, + help="Source project name, that will be used to resolve the table names in the FROM statement. If not provided, the fully qualified table names must be specified in the query (escaped by backquotes: `.
`).", + ) + parser.add_argument("--query", "-q", required=True, help="SQL query") + parser.add_argument( + "--format", + "-f", + required=False, + help='The format of the output, can be "json" or "csv". Default is "csv".', + ) + parser.add_argument( + "--id-name", + "-in", + required=False, + help='Name of the ID column name. Default is "_id".', + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -31,18 +52,18 @@ def do_command(cls, args): client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: if args.project: - builder = core.UriBuilder(['datasource', args.project, '_sql']) + builder = core.UriBuilder(["datasource", args.project, "_sql"]) else: - builder = core.UriBuilder(['datasources', '_sql']) - if args.format == 'json' and args.id_name: - builder.query('id', args.id_name) + builder = core.UriBuilder(["datasources", "_sql"]) + if args.format == "json" and args.id_name: + builder.query("id", args.id_name) uri = builder.build() request = client.new_request() if args.verbose: request.verbose() request.fail_on_error() - if args.format == 'json': + if args.format == "json": request.accept_json().content_type_text_plain() response = request.post().resource(uri).content(args.query).send() # output to stdout @@ -52,9 +73,9 @@ def do_command(cls, args): print(str(response)) else: request.accept_text_csv().content_type_form_urlencoded() - body = 'query=' + urllib.parse.quote(args.query) + body = "query=" + urllib.parse.quote(args.query) if args.id_name: - body = body + '&id=' + urllib.parse.quote(args.id_name) + body = body + "&id=" + urllib.parse.quote(args.id_name) response = request.post().resource(uri).content(body).send() # output to stdout print(str(response)) @@ -72,11 +93,36 @@ def add_arguments(cls, parser): """ Add command specific options """ - parser.add_argument('--project', '-pr', required=False, help='Project name used as the SQL execution context to filter. If not specified, history from any context is returned. If \'*\' is specified, history of SQL execution without context is returned.') - parser.add_argument('--offset', '-os', required=False, help='Number of history items to skip. Default is 0 (note that the items are ordered by most recent first).') - parser.add_argument('--limit', '-lm', required=False, help='Maximum number of history items to return. Default is 100.') - parser.add_argument('--subject', '-sb', required=False, help='Filter by user name, only administrators can retrieve SQL history of other users. If \'*\' is specified, history of all users is retrieved. Default is the current user name.') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument( + "--project", + "-pr", + required=False, + help="Project name used as the SQL execution context to filter. If not specified, history from any context is returned. If '*' is specified, history of SQL execution without context is returned.", + ) + parser.add_argument( + "--offset", + "-os", + required=False, + help="Number of history items to skip. Default is 0 (note that the items are ordered by most recent first).", + ) + parser.add_argument( + "--limit", + "-lm", + required=False, + help="Maximum number of history items to return. Default is 100.", + ) + parser.add_argument( + "--subject", + "-sb", + required=False, + help="Filter by user name, only administrators can retrieve SQL history of other users. 
If '*' is specified, history of all users is retrieved. Default is the current user name.", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -88,15 +134,19 @@ def do_command(cls, args): client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: if args.subject and args.subject != args.user: - builder = core.UriBuilder(['system', 'subject-profile', args.subject, 'sql-history']) + builder = core.UriBuilder( + ["system", "subject-profile", args.subject, "sql-history"] + ) else: - builder = core.UriBuilder(['system', 'subject-profile', '_current', 'sql-history']) + builder = core.UriBuilder( + ["system", "subject-profile", "_current", "sql-history"] + ) if args.project: - builder.query('datasource', args.project) + builder.query("datasource", args.project) if args.offset: - builder.query('offset', args.offset) + builder.query("offset", args.offset) if args.limit: - builder.query('limit', args.limit) + builder.query("limit", args.limit) uri = builder.build() request = client.new_request() if args.verbose: @@ -109,4 +159,4 @@ def do_command(cls, args): else: print(str(response)) finally: - client.close() \ No newline at end of file + client.close() diff --git a/obiba_opal/subjects.py b/obiba_opal/subjects.py index f2bba63..96da781 100755 --- a/obiba_opal/subjects.py +++ b/obiba_opal/subjects.py @@ -5,6 +5,7 @@ import obiba_opal.core as core import json + class UserService: """ Users management service. @@ -19,20 +20,58 @@ def add_arguments(self, parser): """ Add data command specific options """ - parser.add_argument('--name', '-n', required=False, help='User name.') - parser.add_argument('--upassword', '-upa', required=False, help='User password of at least 8 characters, must contain at least one digit, one upper case alphabet, one lower case alphabet, one special character (which includes @#$%^&+=!) and no white space.') - parser.add_argument('--ucertificate', '-uc', required=False, help='User certificate (public key) file') - parser.add_argument('--disabled', '-di', action='store_true', required=False, - help='Disable user account (if omitted the user is enabled by default).') - parser.add_argument('--groups', '-g', nargs='+', required=False, help='User groups') - - parser.add_argument('--fetch', '-fe', action='store_true', required=False, - help='Fetch one or multiple user(s).') - parser.add_argument('--add', '-a', action='store_true', help='Add a user.') - parser.add_argument('--update', '-ud', action='store_true', required=False, help='Update a user.') - parser.add_argument('--delete', '-de', action='store_true', required=False, - help='Delete a user.') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument("--name", "-n", required=False, help="User name.") + parser.add_argument( + "--upassword", + "-upa", + required=False, + help="User password of at least 8 characters, must contain at least one digit, one upper case alphabet, one lower case alphabet, one special character (which includes @#$%^&+=!) 
and no white space.", + ) + parser.add_argument( + "--ucertificate", + "-uc", + required=False, + help="User certificate (public key) file", + ) + parser.add_argument( + "--disabled", + "-di", + action="store_true", + required=False, + help="Disable user account (if omitted the user is enabled by default).", + ) + parser.add_argument( + "--groups", "-g", nargs="+", required=False, help="User groups" + ) + + parser.add_argument( + "--fetch", + "-fe", + action="store_true", + required=False, + help="Fetch one or multiple user(s).", + ) + parser.add_argument("--add", "-a", action="store_true", help="Add a user.") + parser.add_argument( + "--update", + "-ud", + action="store_true", + required=False, + help="Update a user.", + ) + parser.add_argument( + "--delete", + "-de", + action="store_true", + required=False, + help="Delete a user.", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(self, args): @@ -44,9 +83,21 @@ def do_command(self, args): try: service = UserService(client, args.verbose) if args.add: - service.add_user(args.name, args.upassword, args.ucertificate, args.groups, args.disabled) + service.add_user( + args.name, + args.upassword, + args.ucertificate, + args.groups, + args.disabled, + ) elif args.update: - service.update_user(args.name, args.upassword, args.ucertificate, args.groups, args.disabled) + service.update_user( + args.name, + args.upassword, + args.ucertificate, + args.groups, + args.disabled, + ) elif args.delete: service.delete_user(args.name) else: @@ -69,7 +120,7 @@ def get_users(self) -> dict: request.verbose() response = request.get().resource(self._make_ws()).send() return response.from_json() - + def get_user(self, name: str, fail_safe: bool = True) -> dict: """ Get a user. @@ -78,7 +129,7 @@ def get_user(self, name: str, fail_safe: bool = True) -> dict: :param fail_safe: When user is not found, return None (True is default) else raise error """ if not name: - raise ValueError('The name of the user to fetch is required') + raise ValueError("The name of the user to fetch is required") request = self.client.new_request() if self.verbose: request.verbose() @@ -86,7 +137,7 @@ def get_user(self, name: str, fail_safe: bool = True) -> dict: request.fail_on_error() response = request.get().resource(self._make_ws(name)).send() return response.from_json() if response.code == 200 else None - + def delete_user(self, name: str): """ Delete a user. @@ -94,14 +145,21 @@ def delete_user(self, name: str): :param name: The user name """ if not name: - raise ValueError('The name of the user to delete is required') + raise ValueError("The name of the user to delete is required") request = self.client.new_request() request.fail_on_error() if self.verbose: request.verbose() request.delete().resource(self._make_ws(name)).send() - def update_user(self, name: str, upassword: str = None, ucertificate: str = None, groups: list = [], disabled: bool = False): + def update_user( + self, + name: str, + upassword: str = None, + ucertificate: str = None, + groups: list = None, + disabled: bool = False, + ): """ Update a user. 
@@ -112,41 +170,50 @@ def update_user(self, name: str, upassword: str = None, ucertificate: str = None :param disabled: Not enabled """ if not name: - raise ValueError('The name of the user to update is required') + raise ValueError("The name of the user to update is required") userInfo = self.get_user(name) - user = {'name': name} + user = {"name": name} request = self.client.new_request() request.fail_on_error() if self.verbose: request.verbose() - + if upassword: - if userInfo['authenticationType'] == "CERTIFICATE": - raise ValueError("%s requires a certificate (public key) file" % user.name) + if userInfo["authenticationType"] == "CERTIFICATE": + raise ValueError( + f"{user['name']} requires a certificate (public key) file" + ) if len(upassword) < 8: - raise ValueError('Password must contain at least 8 characters.') - user['authenticationType'] = 'PASSWORD' - user['password'] = upassword + raise ValueError("Password must contain at least 8 characters.") + user["authenticationType"] = "PASSWORD" + user["password"] = upassword elif ucertificate: - if userInfo['authenticationType'] == "PASSWORD": - raise ValueError("%s requires a password" % user.name) + if userInfo["authenticationType"] == "PASSWORD": + raise ValueError(f"{user['name']} requires a password") - user['authenticationType'] = 'CERTIFICATE' - with open(ucertificate, 'rb') as cert: - user['certificate'] = cert.read() + user["authenticationType"] = "CERTIFICATE" + with open(ucertificate, "rb") as cert: + user["certificate"] = cert.read() else: - user['authenticationType'] = userInfo['authenticationType'] + user["authenticationType"] = userInfo["authenticationType"] - user['enabled'] = not disabled + user["enabled"] = not disabled if groups: - user['groups'] = groups + user["groups"] = groups request.fail_on_error().accept_json().content_type_json() request.put().resource(self._make_ws(name)).content(json.dumps(user)).send() - def add_user(self, name: str, upassword: str = None, ucertificate: str = None, groups: list = [], disabled: bool = False): + def add_user( + self, + name: str, + upassword: str = None, + ucertificate: str = None, + groups: list = None, + disabled: bool = False, + ): """ Add a user. 
@@ -157,31 +224,31 @@ def add_user(self, name: str, upassword: str = None, ucertificate: str = None, g :param disabled: Not enabled """ if not name: - raise ValueError('The name of the user to add is required') + raise ValueError("The name of the user to add is required") if not upassword and not ucertificate: - raise ValueError('The user password or a certificate file is required.') - + raise ValueError("The user password or a certificate file is required.") + request = self.client.new_request() request.fail_on_error() if self.verbose: request.verbose() - + # create user - user = {'name': name} + user = {"name": name} if upassword: if len(upassword) < 8: - raise Exception('Password must contain at least 8 characters.') - user['authenticationType'] = 'PASSWORD' - user['password'] = upassword + raise Exception("Password must contain at least 8 characters.") + user["authenticationType"] = "PASSWORD" + user["password"] = upassword else: - user['authenticationType'] = 'CERTIFICATE' - with open(ucertificate, 'rb') as cert: - user['certificate'] = cert.read() + user["authenticationType"] = "CERTIFICATE" + with open(ucertificate, "rb") as cert: + user["certificate"] = cert.read() if disabled: - user['enabled'] = False + user["enabled"] = False if groups: - user['groups'] = groups + user["groups"] = groups request.fail_on_error().accept_json().content_type_json() request.post().resource(self._make_ws()).content(json.dumps(user)).send() @@ -191,11 +258,12 @@ def _make_ws(self, name: str = None): Build the web service resource path """ if not name: - ws = '/system/subject-credentials' + ws = "/system/subject-credentials" else: - ws = '/system/subject-credential/%s' % name + ws = f"/system/subject-credential/{name}" return ws + class GroupService: """ Groups management service. @@ -210,13 +278,27 @@ def add_arguments(self, parser): """ Add data command specific options """ - parser.add_argument('--name', '-n', required=False, - help='Group name.') - parser.add_argument('--fetch', '-fe', action='store_true', required=False, - help='Fetch one or multiple group(s).') - parser.add_argument('--delete', '-de', action='store_true', required=False, - help='Delete a group.') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument("--name", "-n", required=False, help="Group name.") + parser.add_argument( + "--fetch", + "-fe", + action="store_true", + required=False, + help="Fetch one or multiple group(s).", + ) + parser.add_argument( + "--delete", + "-de", + action="store_true", + required=False, + help="Delete a group.", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(self, args): @@ -254,11 +336,11 @@ def get_groups(self) -> list: def get_group(self, name: str) -> dict: """ Get a specific group. - + :param name: The name of the group """ if not name: - raise ValueError('The name of the group to fetch is required') + raise ValueError("The name of the group to fetch is required") request = self.client.new_request() request.fail_on_error() @@ -266,15 +348,15 @@ def get_group(self, name: str) -> dict: request.verbose() response = request.get().resource(self._make_ws(name)).send() return response.from_json() - + def delete_group(self, name: str): """ Delete a specific group (does NOT delete the users of the group). 
- + :param name: The name of the group """ if not name: - raise ValueError('The name of the group to delete is required') + raise ValueError("The name of the group to delete is required") request = self.client.new_request() request.fail_on_error() @@ -286,10 +368,6 @@ def _make_ws(self, name: str = None): """ Build the web service resource path """ - if name: - ws = '/system/group/%s' % name - else: - ws = '/system/groups' + ws = f"/system/group/{name}" if name else "/system/groups" return ws - diff --git a/obiba_opal/system.py b/obiba_opal/system.py index eca74e4..e0d8ff1 100644 --- a/obiba_opal/system.py +++ b/obiba_opal/system.py @@ -3,12 +3,12 @@ """ import obiba_opal.core as core -from typing import Union import ast import json import sys import time + class SystemService: """ Get some system information. @@ -19,16 +19,34 @@ def add_arguments(cls, parser): """ Add system command specific options """ - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') - - parser.add_argument('--version', action='store_true', required=False, - help='Opal version number') - parser.add_argument('--env', action='store_true', required=False, - help='Opal java execution environment (JVM related statistic properties') - parser.add_argument('--status', action='store_true', required=False, - help='Opal application status (JVM related dynamic properties)') - parser.add_argument('--conf', action='store_true', required=False, - help='Opal application configuration') + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) + + parser.add_argument( + "--version", action="store_true", required=False, help="Opal version number" + ) + parser.add_argument( + "--env", + action="store_true", + required=False, + help="Opal java execution environment (JVM related statistic properties", + ) + parser.add_argument( + "--status", + action="store_true", + required=False, + help="Opal application status (JVM related dynamic properties)", + ) + parser.add_argument( + "--conf", + action="store_true", + required=False, + help="Opal application configuration", + ) @classmethod def do_command(cls, args): @@ -51,7 +69,7 @@ def do_command(cls, args): response = request.send() # format response - res = response.content.decode('utf-8') + res = response.content.decode("utf-8") if args.json: res = response.pretty_json() @@ -89,23 +107,72 @@ def add_arguments(cls, parser): Add plugin command specific options """ - parser.add_argument('--list', '-ls', action='store_true', help='List the installed plugins.') - parser.add_argument('--updates', '-lu', action='store_true', help='List the installed plugins that can be updated.') - parser.add_argument('--available', '-la', action='store_true', help='List the new plugins that could be installed.') - parser.add_argument('--install', '-i', required=False, - help='Install a plugin by providing its name or name:version or a path to a plugin archive file (in Opal file system). If no version is specified, the latest version is installed. Requires system restart to be effective.') - parser.add_argument('--remove', '-rm', required=False, - help='Remove a plugin by providing its name. 
Requires system restart to be effective.') - parser.add_argument('--reinstate', '-ri', required=False, - help='Reinstate a plugin that was previously removed by providing its name.') - parser.add_argument('--fetch', '-f', required=False, help='Get the named plugin description.') - parser.add_argument('--configure', '-c', required=False, - help='Configure the plugin site properties. Usually requires to restart the associated service to be effective.') - parser.add_argument('--status', '-su', required=False, - help='Get the status of the service associated to the named plugin.') - parser.add_argument('--start', '-sa', required=False, help='Start the service associated to the named plugin.') - parser.add_argument('--stop', '-so', required=False, help='Stop the service associated to the named plugin.') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument( + "--list", "-ls", action="store_true", help="List the installed plugins." + ) + parser.add_argument( + "--updates", + "-lu", + action="store_true", + help="List the installed plugins that can be updated.", + ) + parser.add_argument( + "--available", + "-la", + action="store_true", + help="List the new plugins that could be installed.", + ) + parser.add_argument( + "--install", + "-i", + required=False, + help="Install a plugin by providing its name or name:version or a path to a plugin archive file (in Opal file system). If no version is specified, the latest version is installed. Requires system restart to be effective.", + ) + parser.add_argument( + "--remove", + "-rm", + required=False, + help="Remove a plugin by providing its name. Requires system restart to be effective.", + ) + parser.add_argument( + "--reinstate", + "-ri", + required=False, + help="Reinstate a plugin that was previously removed by providing its name.", + ) + parser.add_argument( + "--fetch", "-f", required=False, help="Get the named plugin description." + ) + parser.add_argument( + "--configure", + "-c", + required=False, + help="Configure the plugin site properties. 
Usually requires to restart the associated service to be effective.", + ) + parser.add_argument( + "--status", + "-su", + required=False, + help="Get the status of the service associated to the named plugin.", + ) + parser.add_argument( + "--start", + "-sa", + required=False, + help="Start the service associated to the named plugin.", + ) + parser.add_argument( + "--stop", + "-so", + required=False, + help="Stop the service associated to the named plugin.", + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -122,41 +189,67 @@ def do_command(cls, args): request.verbose() if args.list: - response = request.get().resource('/plugins').send() + response = request.get().resource("/plugins").send() elif args.updates: - response = request.get().resource('/plugins/_updates').send() + response = request.get().resource("/plugins/_updates").send() elif args.available: - response = request.get().resource('/plugins/_available').send() + response = request.get().resource("/plugins/_available").send() elif args.install: - if args.install.startswith('/'): - response = request.post().resource('/plugins?file=' + args.install).send() + if args.install.startswith("/"): + response = ( + request.post().resource("/plugins?file=" + args.install).send() + ) else: - nameVersion = args.install.split(':') + nameVersion = args.install.split(":") if len(nameVersion) == 1: - response = request.post().resource('/plugins?name=' + nameVersion[0]).send() + response = ( + request.post() + .resource("/plugins?name=" + nameVersion[0]) + .send() + ) else: - response = request.post().resource( - '/plugins?name=' + nameVersion[0] + '&version=' + nameVersion[1]).send() + response = ( + request.post() + .resource( + "/plugins?name=" + + nameVersion[0] + + "&version=" + + nameVersion[1] + ) + .send() + ) elif args.fetch: - response = request.get().resource('/plugin/' + args.fetch).send() + response = request.get().resource("/plugin/" + args.fetch).send() elif args.configure: request.content_type_text_plain() - print('Enter plugin site properties (one property per line, Ctrl-D to end input):') + print( + "Enter plugin site properties (one property per line, Ctrl-D to end input):" + ) request.content(sys.stdin.read()) - response = request.put().resource('/plugin/' + args.configure + '/cfg').send() + response = ( + request.put().resource("/plugin/" + args.configure + "/cfg").send() + ) elif args.remove: - response = request.delete().resource('/plugin/' + args.remove).send() + response = request.delete().resource("/plugin/" + args.remove).send() elif args.reinstate: - response = request.put().resource('/plugin/' + args.reinstate).send() + response = request.put().resource("/plugin/" + args.reinstate).send() elif args.status: - response = request.get().resource('/plugin/' + args.status + '/service').send() + response = ( + request.get().resource("/plugin/" + args.status + "/service").send() + ) elif args.start: - response = request.put().resource('/plugin/' + args.start + '/service').send() + response = ( + request.put().resource("/plugin/" + args.start + "/service").send() + ) elif args.stop: - response = request.delete().resource('/plugin/' + args.stop + '/service').send() + response = ( + request.delete() + .resource("/plugin/" + args.stop + "/service") + .send() + ) # format response - res = response.content.decode('utf-8') + res = response.content.decode("utf-8") if args.json: res = response.pretty_json() @@ -172,8 
+265,8 @@ class TaxonomyService: """ def __init__(self, client: core.OpalClient, verbose: bool = False): - self.client = client - self.verbose = verbose + self.client = client + self.verbose = verbose def __make_request(self): request = self.client.new_request() @@ -187,43 +280,76 @@ def __make_request(self): def download(self, name: str): resource = self.OpalTaxonomyResource(name) - request = self.__make_request().get().resource(resource.get_download_ws()).accept('text/plain') + request = ( + self.__make_request() + .get() + .resource(resource.get_download_ws()) + .accept("text/plain") + ) return request.send() def importFile(self, file: str, override: bool = False): - uri = core.UriBuilder(['system', 'conf', 'taxonomies', 'import', '_file']) \ - .query('file', file) \ - .query('override', str(override).lower()) \ - .build() - return self.__make_request() \ - .post() \ - .resource(uri) \ - .send() + uri = ( + core.UriBuilder(["system", "conf", "taxonomies", "import", "_file"]) + .query("file", file) + .query("override", str(override).lower()) + .build() + ) + return self.__make_request().post().resource(uri).send() def delete(self, name: str): - return self.__make_request().resource(self.OpalTaxonomyResource(name).get_ws()).delete().send() + return ( + self.__make_request() + .resource(self.OpalTaxonomyResource(name).get_ws()) + .delete() + .send() + ) def confirmAndDelete(self, name: str, rejectHandler): - confirmed = input('Delete the taxonomy {}? [y/N]: '.format(name)) - if confirmed == 'y': + confirmed = input(f"Delete the taxonomy {name}? [y/N]: ") + if confirmed == "y": return self.delete(name) return rejectHandler() def summaries(self): - return self.__make_request().get().resource('/system/conf/taxonomies/summaries').send() + return ( + self.__make_request() + .get() + .resource("/system/conf/taxonomies/summaries") + .send() + ) @classmethod def add_arguments(cls, parser): """ Add file command specific options """ - parser.add_argument('--download', '-dl', required=False, help='Download a taxonomy by name (YAML format).') - parser.add_argument('--import-file', '-if', required=False, help='Import a taxonomy from the provided Opal file path (YAML format).') - parser.add_argument('--delete', '-dt', required=False, help='Delete a taxonomy by name.') - parser.add_argument('--force', '-f', action='store_true', help='Skip confirmation.') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument( + "--download", + "-dl", + required=False, + help="Download a taxonomy by name (YAML format).", + ) + parser.add_argument( + "--import-file", + "-if", + required=False, + help="Import a taxonomy from the provided Opal file path (YAML format).", + ) + parser.add_argument( + "--delete", "-dt", required=False, help="Delete a taxonomy by name." + ) + parser.add_argument( + "--force", "-f", action="store_true", help="Skip confirmation." 
+ ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -245,8 +371,9 @@ def do_command(cls, args): if args.force: response = service.delete(args.delete) else: + def rejectHandler(): - print('Aborted.') + print("Aborted.") sys.exit(0) response = service.confirmAndDelete(args.delete, rejectHandler) @@ -254,11 +381,16 @@ def rejectHandler(): response = service.summaries() # format response - if args.json and not args.download and not args.delete and not args.import_file: + if ( + args.json + and not args.download + and not args.delete + and not args.import_file + ): print(response.pretty_json()) else: - # output to stdout as string - print(response) + # output to stdout as string + print(response) finally: client.close() @@ -272,10 +404,10 @@ def __init__(self, name): self.name = name def get_ws(self): - return '/system/conf/taxonomy/' + self.name + return "/system/conf/taxonomy/" + self.name def get_download_ws(self): - return '/system/conf/taxonomy/' + self.name + '/_download' + return "/system/conf/taxonomy/" + self.name + "/_download" class TaskService: @@ -292,14 +424,38 @@ def add_arguments(cls, parser): """ Add task command specific options """ - parser.add_argument('--id', required=False, - help='The task ID. If not provided, it will be read from the standard input (from the JSON representation of the task or a plain value).') - parser.add_argument('--show', '-sh', action='store_true', help='Show JSON representation of the task') - parser.add_argument('--status', '-st', action='store_true', help='Get the status of the task') - parser.add_argument('--wait', '-w', action='store_true', help='Wait for the task to complete (successfully or not)') - parser.add_argument('--cancel', '-c', action='store_true', help='Cancel the task') - parser.add_argument('--delete', '-d', action='store_true', help='Delete the task') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument( + "--id", + required=False, + help="The task ID. 
If not provided, it will be read from the standard input (from the JSON representation of the task or a plain value).", + ) + parser.add_argument( + "--show", + "-sh", + action="store_true", + help="Show JSON representation of the task", + ) + parser.add_argument( + "--status", "-st", action="store_true", help="Get the status of the task" + ) + parser.add_argument( + "--wait", + "-w", + action="store_true", + help="Wait for the task to complete (successfully or not)", + ) + parser.add_argument( + "--cancel", "-c", action="store_true", help="Cancel the task" + ) + parser.add_argument( + "--delete", "-d", action="store_true", help="Delete the task" + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -312,19 +468,21 @@ def do_command(cls, args): service = TaskService(client, args.verbose) try: if not args.id: - id = sys.stdin.read().strip('\n') - if id.startswith('{'): - id = str(json.loads(id)['id']) + id = sys.stdin.read().strip("\n") + if id.startswith("{"): + id = str(json.loads(id)["id"]) args.id = id - if args.show or not (args.show or args.wait or args.status or args.cancel or args.delete): + if args.show or not ( + args.show or args.wait or args.status or args.cancel or args.delete + ): res = service.get_task(args.id) core.Formatter.print_json(res, args.json) if args.wait: status = service.wait_task(args.id) - print('\r\033[K' + status) + print("\r\033[K" + status) if args.status: - print(service.get_task(args.id)['status']) + print(service.get_task(args.id)["status"]) if args.cancel: service.cancel_task(args.id) if args.delete: @@ -332,36 +490,41 @@ def do_command(cls, args): finally: client.close() - def get_task(self, id: Union[str, int]): + def get_task(self, id: str | int): request = self._make_request() - request.get().resource('/shell/command/%s' % id) + request.get().resource(f"/shell/command/{id}") response = request.send() return response.from_json() - def delete_task(self, id: Union[str, int]): + def delete_task(self, id: str | int): request = self._make_request() - request.delete().resource('/shell/command/%s' % id).send() + request.delete().resource(f"/shell/command/{id}").send() - def cancel_task(self, id: Union[str, int]): + def cancel_task(self, id: str | int): request = self._make_request().content_type_text_plain() - request.content('CANCELED') - request.put().resource('/shell/command/%s/status' % id).send() + request.content("CANCELED") + request.put().resource(f"/shell/command/{id}/status").send() - def wait_task(self, id: Union[str, int]): + def wait_task(self, id: str | int): task = self.get_task(id) - while task['status'] not in ['SUCCEEDED', 'CANCELED', 'FAILED']: - if 'progress' in task: - progress = task['progress'] - if 'message' in progress: - sys.stdout.write('\r\033[K' + str(progress['percent']) + '% ' + progress['message']) + while task["status"] not in ["SUCCEEDED", "CANCELED", "FAILED"]: + if "progress" in task: + progress = task["progress"] + if "message" in progress: + sys.stdout.write( + "\r\033[K" + + str(progress["percent"]) + + "% " + + progress["message"] + ) else: - sys.stdout.write('\r\033[K' + str(progress['percent']) + '%') + sys.stdout.write("\r\033[K" + str(progress["percent"]) + "%") else: - sys.stdout.write('.') + sys.stdout.write(".") sys.stdout.flush() time.sleep(1) task = self.get_task(id) - return task['status'] + return task["status"] def _make_request(self): request = self.client.new_request() @@ -381,7 +544,7 @@ def 
__init__(self, client: core.OpalClient, verbose: bool = False): self.client = client self.verbose = verbose - def make_request(self, method: str, accept: str = None, headers = None): + def make_request(self, method: str, accept: str = None, headers=None): request = self.client.new_request() request.method(method) request.fail_on_error() @@ -400,17 +563,24 @@ def make_request(self, method: str, accept: str = None, headers = None): request.verbose() return request - def make_request_with_content_type(self, method: str, contentType: str, accept: str = None, headers = None, content: str = None): + def make_request_with_content_type( + self, + method: str, + contentType: str, + accept: str = None, + headers=None, + content: str = None, + ): request = self.make_request(method, accept, headers) if contentType: - request.content_type(contentType) + request.content_type(contentType) - if content is not None: - request.content(content) - else: - print('Enter content:') - request.content(sys.stdin.read()) + if content is not None: + request.content(content) + else: + print("Enter content:") + request.content(sys.stdin.read()) return request @@ -422,15 +592,40 @@ def add_arguments(cls, parser): """ Add REST command specific options """ - parser.add_argument('ws', help='Web service path, for instance: /datasource/xxx/table/yyy/variable/vvv') - parser.add_argument('--method', '-m', required=False, - help='HTTP method (default is GET, others are POST, PUT, DELETE, OPTIONS)') - parser.add_argument('--accept', '-a', required=False, help='Accept header (default is application/json)') - parser.add_argument('--content-type', '-ct', required=False, - help='Content-Type header (default is application/json)') - parser.add_argument('--headers', '-hs', required=False, - help='Custom headers in the form of: { "Key2": "Value2", "Key2": "Value2" }') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument( + "ws", + help="Web service path, for instance: /datasource/xxx/table/yyy/variable/vvv", + ) + parser.add_argument( + "--method", + "-m", + required=False, + help="HTTP method (default is GET, others are POST, PUT, DELETE, OPTIONS)", + ) + parser.add_argument( + "--accept", + "-a", + required=False, + help="Accept header (default is application/json)", + ) + parser.add_argument( + "--content-type", + "-ct", + required=False, + help="Content-Type header (default is application/json)", + ) + parser.add_argument( + "--headers", + "-hs", + required=False, + help='Custom headers in the form of: { "Key2": "Value2", "Key2": "Value2" }', + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -440,22 +635,24 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) service = RESTService(client, args.verbose) - method = args.method if args.method else 'GET' + method = args.method if args.method else "GET" try: - if method in ['POST', 'PUT']: - request = service.make_request_with_content_type(args.method, args.content_type, args.accept, args.headers) + if method in ["POST", "PUT"]: + request = service.make_request_with_content_type( + args.method, args.content_type, args.accept, args.headers + ) else: - request = service.make_request(args.method, args.accept, args.headers) + request = service.make_request(args.method, args.accept, args.headers) # format response response = 
service.send_request(args.ws, request) - res = response.content.decode('utf-8') + res = response.content.decode("utf-8") if args.json: res = response.pretty_json() - elif args.method in ['OPTIONS']: - res = response.headers['Allow'] + elif args.method in ["OPTIONS"]: + res = response.headers["Allow"] # output to stdout print(res) diff --git a/obiba_opal/table.py b/obiba_opal/table.py index 03a582d..e87dbf3 100644 --- a/obiba_opal/table.py +++ b/obiba_opal/table.py @@ -23,15 +23,37 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--project', '-pr', required=True, help='Source project name') - parser.add_argument('--tables', '-t', nargs='+', required=False, - help='List of table names to be copied (default is all)') - parser.add_argument('--destination', '-d', required=True, help='Destination project name') - parser.add_argument('--name', '-na', required=False, - help='New table name (required if source and destination are the same, ignored if more than one table is to be copied)') - parser.add_argument('--incremental', '-i', action='store_true', help='Incremental copy') - parser.add_argument('--nulls', '-nu', action='store_true', help='Copy the null values') - parser.add_argument('--json', '-j', action='store_true', help='Pretty JSON formatting of the response') + parser.add_argument( + "--project", "-pr", required=True, help="Source project name" + ) + parser.add_argument( + "--tables", + "-t", + nargs="+", + required=False, + help="List of table names to be copied (default is all)", + ) + parser.add_argument( + "--destination", "-d", required=True, help="Destination project name" + ) + parser.add_argument( + "--name", + "-na", + required=False, + help="New table name (required if source and destination are the same, ignored if more than one table is to be copied)", + ) + parser.add_argument( + "--incremental", "-i", action="store_true", help="Incremental copy" + ) + parser.add_argument( + "--nulls", "-nu", action="store_true", help="Copy the null values" + ) + parser.add_argument( + "--json", + "-j", + action="store_true", + help="Pretty JSON formatting of the response", + ) @classmethod def do_command(cls, args): @@ -41,13 +63,28 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - res = CopyTableCommand(client, args.verbose).copy_tables(args.project, args.tables, args.destination, args.name, args.incremental, args.nulls) + res = CopyTableCommand(client, args.verbose).copy_tables( + args.project, + args.tables, + args.destination, + args.name, + args.incremental, + args.nulls, + ) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def copy_tables(self, project: str, tables: list, destination: str, name: str, incremental: bool, nulls: bool) -> dict: + def copy_tables( + self, + project: str, + tables: list, + destination: str, + name: str, + incremental: bool, + nulls: bool, + ) -> dict: """ Execute copy data command @@ -61,10 +98,16 @@ def copy_tables(self, project: str, tables: list, destination: str, name: str, i tables_ = tables if not tables: tables_ = self._retrieve_datasource_tables(project) - copier = io.OpalCopier.build(client=self.client, datasource=project, tables=tables_, - destination=destination, name=name, - incremental=incremental, nulls=nulls, - verbose=self.verbose) + copier = io.OpalCopier.build( + client=self.client, + datasource=project, + tables=tables_, + destination=destination, + name=name, + 
incremental=incremental, + nulls=nulls, + verbose=self.verbose, + ) response = copier.submit() return response.from_json() @@ -72,15 +115,21 @@ def _retrieve_datasource_tables(self, project: str) -> list: request = self.client.new_request() if self.verbose: request.verbose() - response = request.fail_on_error().get().resource( - core.UriBuilder(['datasource', project, 'tables']).build()).send().from_json() + response = ( + request.fail_on_error() + .get() + .resource(core.UriBuilder(["datasource", project, "tables"]).build()) + .send() + .from_json() + ) tables = [] for table in response: - tables.append(str(table['name'])) + tables.append(str(table["name"])) return tables + class DeleteTableService: """ Delete some tables in a project. @@ -95,9 +144,19 @@ def add_arguments(cls, parser): """ Add command specific options """ - parser.add_argument('--project', '-pr', required=True, help='Project name to which the tables belong') - parser.add_argument('--tables', '-t', nargs='+', required=False, - help='List of table names which will be deleted (default is all)') + parser.add_argument( + "--project", + "-pr", + required=True, + help="Project name to which the tables belong", + ) + parser.add_argument( + "--tables", + "-t", + nargs="+", + required=False, + help="List of table names which will be deleted (default is all)", + ) @classmethod def do_command(cls, args): @@ -107,7 +166,9 @@ def do_command(cls, args): # Build and send requests client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - DictionaryService(client, args.verbose).delete_tables(args.project, args.tables) + DictionaryService(client, args.verbose).delete_tables( + args.project, args.tables + ) finally: client.close() @@ -126,12 +187,28 @@ def add_arguments(cls, parser): """ Add command specific options """ - parser.add_argument('--project', '-pr', required=True, help='Source project name') - parser.add_argument('--views', '-vw', nargs='+', required=False, - help='List of view names to be backed up (default is all)') - parser.add_argument('--output', '-out', required=False, help='Output directory name (default is current directory)') - parser.add_argument('--force', '-f', action='store_true', - help='Skip confirmation when overwriting the backup file.') + parser.add_argument( + "--project", "-pr", required=True, help="Source project name" + ) + parser.add_argument( + "--views", + "-vw", + nargs="+", + required=False, + help="List of view names to be backed up (default is all)", + ) + parser.add_argument( + "--output", + "-out", + required=False, + help="Output directory name (default is current directory)", + ) + parser.add_argument( + "--force", + "-f", + action="store_true", + help="Skip confirmation when overwriting the backup file.", + ) @classmethod def do_command(cls, args): @@ -142,13 +219,15 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - BackupViewService(client, args.verbose).backup_views(args.project, args.views, args.output, args.force) + BackupViewService(client, args.verbose).backup_views( + args.project, args.views, args.output, args.force + ) finally: client.close() def backup_view(self, project: str, view, outdir, force: bool): - outfile = view + '.json' - print('Backup of', view, 'in', outfile, '...') + outfile = view + ".json" + print("Backup of", view, "in", outfile, "...") outpath = os.path.join(outdir, outfile) @@ -156,19 +235,22 @@ def backup_view(self, project: str, view, outdir, force: bool): 
request.fail_on_error() if self.verbose: request.verbose() - response = request.get().resource(core.UriBuilder(['datasource', project, 'view', view]).build()).send() + response = ( + request.get() + .resource(core.UriBuilder(["datasource", project, "view", view]).build()) + .send() + ) dowrite = True if os.path.exists(outpath) and not force: dowrite = False confirmed = input('Overwrite the file "' + outpath + '"? [y/N]: ') - if confirmed == 'y': + if confirmed == "y": dowrite = True if dowrite: - out = open(outpath, 'w+') - out.write(str(response)) - out.close() + with open(outpath, "w+") as out: + out.write(str(response)) def backup_views(self, project: str, views: list, output: str, force: bool) -> list: """ @@ -193,19 +275,16 @@ def backup_views(self, project: str, views: list, output: str, force: bool) -> l safeviews.append(view) views_ = safeviews if not views_: - print('No views to backup in project', project) + print("No views to backup in project", project) else: # prepare output directory outdir = output - if not outdir: - outdir = os.getcwd() - else: - outdir = os.path.normpath(outdir) + outdir = os.getcwd() if not outdir else os.path.normpath(outdir) if self.verbose: - print('Output directory is', outdir) + print("Output directory is", outdir) if not os.path.exists(outdir): if self.verbose: - print('Creating output directory ...') + print("Creating output directory ...") os.makedirs(outdir) # backup each view @@ -218,12 +297,17 @@ def _retrieve_datasource_views(self, project: str) -> list: request.fail_on_error() if self.verbose: request.verbose() - response = request.get().resource(core.UriBuilder(['datasource', project, 'tables']).build()).send().from_json() + response = ( + request.get() + .resource(core.UriBuilder(["datasource", project, "tables"]).build()) + .send() + .from_json() + ) views = [] for table in response: - if 'viewLink' in table: - views.append(str(table['name'])) + if "viewLink" in table: + views.append(str(table["name"])) return views @@ -242,13 +326,28 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument('--project', '-pr', required=True, help='Destination project name') - parser.add_argument('--views', '-vw', nargs='+', required=False, - help='List of view names to be restored (default is all the JSON files that are found in the backup directory/zip archive)') - parser.add_argument('--input', '-in', required=False, - help='Input directory name or input zip file containing JSON views (default is current directory)') - parser.add_argument('--force', '-f', action='store_true', - help='Skip confirmation when overwriting an existing view.') + parser.add_argument( + "--project", "-pr", required=True, help="Destination project name" + ) + parser.add_argument( + "--views", + "-vw", + nargs="+", + required=False, + help="List of view names to be restored (default is all the JSON files that are found in the backup directory/zip archive)", + ) + parser.add_argument( + "--input", + "-in", + required=False, + help="Input directory name or input zip file containing JSON views (default is current directory)", + ) + parser.add_argument( + "--force", + "-f", + action="store_true", + help="Skip confirmation when overwriting an existing view.", + ) @classmethod def do_command(cls, args): @@ -261,21 +360,24 @@ def do_command(cls, args): service = RestoreViewService(client, args.verbose) service.restore_views(args.project, args.views, args.input, args.force) - def restore_views(self, project: str, views: list, input: str = None, 
force: bool = False): + def restore_views( + self, project: str, views: list, input: str = None, force: bool = False + ): obsviews = self._retrieve_datasource_views(project) # list input directory content indir = input - if not indir: - indir = os.getcwd() - else: - indir = os.path.normpath(indir) - print('Input directory is', indir) - - if indir.endswith('.zip'): - with zipfile.ZipFile(indir, 'r') as inzip: - for viewfile in [filename for filename in inzip.namelist() if - filename.endswith('.json') and (not views or filename[:-5] in views)]: + indir = os.getcwd() if not indir else os.path.normpath(indir) + print("Input directory is", indir) + + if indir.endswith(".zip"): + with zipfile.ZipFile(indir, "r") as inzip: + for viewfile in [ + filename + for filename in inzip.namelist() + if filename.endswith(".json") + and (not views or filename[:-5] in views) + ]: self._restore_zipped_view(project, obsviews, viewfile, inzip, force) else: for viewfile in self._list_json_files(indir, views): @@ -283,52 +385,72 @@ def restore_views(self, project: str, views: list, input: str = None, force: boo def _retrieve_datasource_views(self, project: str): request = self._make_request() - response = request.get().resource(core.UriBuilder(['datasource', project, 'tables']).build()).send().from_json() + response = ( + request.get() + .resource(core.UriBuilder(["datasource", project, "tables"]).build()) + .send() + .from_json() + ) views = [] for table in response: - if 'viewLink' in table: - views.append(str(table['name'])) + if "viewLink" in table: + views.append(str(table["name"])) return views - def _restore_view(self, project: str, obsviews: list, infile: str, force: bool = False): + def _restore_view( + self, project: str, obsviews: list, infile: str, force: bool = False + ): view = os.path.basename(infile[:-5]) # supposed to be a .json file path dowrite = True if view in obsviews and not force: dowrite = False confirmed = input('Overwrite the view "' + view + '"? [y/N]: ') - if confirmed == 'y': + if confirmed == "y": dowrite = True if dowrite: - print('Restore of', view, 'from', infile, '...') + print("Restore of", view, "from", infile, "...") - request = self._make_request() - with open(infile, 'r') as inf: + request = self._make_request() + with open(infile) as inf: request.content(inf.read()) request.content_type_json() if view in obsviews: request.put().resource( - core.UriBuilder(['datasource', project, 'view', view]).query('comment', 'restore-view').build()).send() + core.UriBuilder(["datasource", project, "view", view]) + .query("comment", "restore-view") + .build() + ).send() else: request.post().resource( - core.UriBuilder(['datasource', project, 'views']).query('comment', 'restore-view').build()).send() - - def _restore_zipped_view(self, project: str, obsviews: list, infile: str, zippedinput, force: bool = False): + core.UriBuilder(["datasource", project, "views"]) + .query("comment", "restore-view") + .build() + ).send() + + def _restore_zipped_view( + self, + project: str, + obsviews: list, + infile: str, + zippedinput, + force: bool = False, + ): view = infile[:-5] # supposed to be a .json file name dowrite = True if view in obsviews and not force: dowrite = False confirmed = input('Overwrite the view "' + view + '"? 
[y/N]: ') - if confirmed == 'y': + if confirmed == "y": dowrite = True if dowrite: - print('Restore of', view, 'from', infile, '...') + print("Restore of", view, "from", infile, "...") request = self._make_request() request.content(zippedinput.read(infile)) @@ -336,18 +458,22 @@ def _restore_zipped_view(self, project: str, obsviews: list, infile: str, zipped if view in obsviews: request.put().resource( - core.UriBuilder(['datasource', project, 'view', view]).query('comment', - 'restore-view').build()).send() + core.UriBuilder(["datasource", project, "view", view]) + .query("comment", "restore-view") + .build() + ).send() else: request.post().resource( - core.UriBuilder(['datasource', project, 'views']).query('comment', - 'restore-view').build()).send() + core.UriBuilder(["datasource", project, "views"]) + .query("comment", "restore-view") + .build() + ).send() def _list_json_files(self, dirref: str, basenames): matches = [] for root, dirnames, filenames in os.walk(dirref): for filename in filenames: - if filename.endswith('.json'): + if filename.endswith(".json"): if not basenames or filename[:-5] in basenames: matches.append(os.path.join(root, filename)) return matches @@ -358,4 +484,4 @@ def _make_request(self, fail_safe: bool = False): request.fail_on_error() if self.verbose: request.verbose() - return request \ No newline at end of file + return request diff --git a/pyproject.toml b/pyproject.toml index ab69894..9aa8b92 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,6 +31,9 @@ dependencies = [ test = [ "pytest>=7.2.2", ] +dev = [ + "ruff>=0.10.0", +] [project.scripts] opal = "obiba_opal.console:run" diff --git a/tests/test_core.py b/tests/test_core.py index 68bb982..15bc897 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -4,27 +4,29 @@ from os.path import exists from tests.utils import TEST_SERVER, TEST_USER, TEST_PASSWORD -class TestClass(unittest.TestCase): +class TestClass(unittest.TestCase): @classmethod def setup_class(cls): # Make sure to place your own certificate files - setattr(cls, 'SSL_CERTIFICATE', './resources/certificates/publickey.pem') - setattr(cls, 'SSL_KEY', './resources/certificates/privatekey.pem') + cls.SSL_CERTIFICATE = "./resources/certificates/publickey.pem" + cls.SSL_KEY = "./resources/certificates/privatekey.pem" def test_sendRestBadServer(self): # FIXME for some reason, the cookie_file is not removed (despite the os.remove() is called and os.path.exists() says it was removed) try: # this one will make a request to check if an OTP is needed - OpalClient.buildWithAuthentication(server='http://deadbeef:8080', user=TEST_USER, - password=TEST_PASSWORD) + OpalClient.buildWithAuthentication( + server="http://deadbeef:8080", user=TEST_USER, password=TEST_PASSWORD + ) assert False except Exception: assert True def test_sendRestBadCredentials(self): - client = OpalClient.buildWithAuthentication(server=TEST_SERVER, user='admin', - password=TEST_PASSWORD) + client = OpalClient.buildWithAuthentication( + server=TEST_SERVER, user="admin", password=TEST_PASSWORD + ) try: self.assertRaises(Exception, self.__sendSimpleRequest, client.new_request()) @@ -34,8 +36,9 @@ def test_sendRestBadCredentials(self): def test_sendRest(self): client = None try: - client = OpalClient.buildWithAuthentication(server=TEST_SERVER, user=TEST_USER, - password=TEST_PASSWORD) + client = OpalClient.buildWithAuthentication( + server=TEST_SERVER, user=TEST_USER, password=TEST_PASSWORD + ) self.__sendSimpleRequest(client.new_request()) except Exception as e: self.fail(e) @@ -47,9 
+50,9 @@ def test_sendSecuredRest(self): if exists(self.SSL_CERTIFICATE): client = None try: - client = OpalClient.buildWithCertificate(server=TEST_SERVER, - cert=self.SSL_CERTIFICATE, - key=self.SSL_KEY) + client = OpalClient.buildWithCertificate( + server=TEST_SERVER, cert=self.SSL_CERTIFICATE, key=self.SSL_KEY + ) self.__sendSimpleRequest(client.new_request()) except Exception as e: self.fail(e) @@ -73,8 +76,11 @@ def test_validSslLoginInfo(self): if exists(self.SSL_CERTIFICATE): client = None try: - args = Namespace(opal=TEST_SERVER, ssl_cert=self.SSL_CERTIFICATE, - ssl_key=self.SSL_KEY) + args = Namespace( + opal=TEST_SERVER, + ssl_cert=self.SSL_CERTIFICATE, + ssl_key=self.SSL_KEY, + ) client = OpalClient.build(loginInfo=OpalClient.LoginInfo.parse(args)) self.__sendSimpleRequest(client.new_request()) except Exception as e: @@ -88,7 +94,7 @@ def test_invalidServerInfo(self): self.assertRaises(Exception, OpalClient.LoginInfo.parse, args) def test_invalidLoginInfo(self): - args = Namespace(opal=TEST_SERVER, usr='administrator', password=TEST_PASSWORD) + args = Namespace(opal=TEST_SERVER, usr="administrator", password=TEST_PASSWORD) self.assertRaises(Exception, OpalClient.LoginInfo.parse, args) def __sendSimpleRequest(self, request): @@ -98,7 +104,7 @@ def __sendSimpleRequest(self, request): # request.verbose() # send request - request.method('GET').resource('/projects') + request.method("GET").resource("/projects") response = request.send() # format response diff --git a/tests/test_data.py b/tests/test_data.py index b726c87..b9fffb1 100644 --- a/tests/test_data.py +++ b/tests/test_data.py @@ -1,12 +1,12 @@ from obiba_opal import DataService, EntityService from tests.utils import make_client -class TestClass: +class TestClass: @classmethod def setup_class(cls): client = make_client() - setattr(cls, 'client', client) + cls.client = client @classmethod def teardown_class(cls): @@ -14,33 +14,33 @@ def teardown_class(cls): def test_entities(self): client = self.client - res = DataService(client).get_entities('CNSIM', 'CNSIM1') - assert type(res) == list + res = DataService(client).get_entities("CNSIM", "CNSIM1") + assert isinstance(res, list) assert len(res) == 2163 def test_valueset(self): client = self.client - res = DataService(client).get_valueset('CNSIM', 'CNSIM1', id='1604') - assert type(res['valueSets']) == list - assert res['valueSets'][0]['identifier'] == '1604' - assert len(res['valueSets'][0]['values']) == 11 - assert type(res['variables']) == list - assert len(res['variables']) == 11 + res = DataService(client).get_valueset("CNSIM", "CNSIM1", id="1604") + assert isinstance(res["valueSets"], list) + assert res["valueSets"][0]["identifier"] == "1604" + assert len(res["valueSets"][0]["values"]) == 11 + assert isinstance(res["variables"], list) + assert len(res["variables"]) == 11 def test_value(self): client = self.client - res = DataService(client).get_value('CNSIM', 'CNSIM1', 'GENDER', id='1604') - assert res['value'] == '1' + res = DataService(client).get_value("CNSIM", "CNSIM1", "GENDER", id="1604") + assert res["value"] == "1" def test_entity(self): client = self.client - res = EntityService(client).get_entity('1604') - assert type(res) == dict - assert res['entityType'] == 'Participant' - assert res['identifier'] == '1604' + res = EntityService(client).get_entity("1604") + assert isinstance(res, dict) + assert res["entityType"] == "Participant" + assert res["identifier"] == "1604" def test_entity_tables(self): client = self.client - res = 
EntityService(client).get_entity_tables('1604') - assert type(res) == list + res = EntityService(client).get_entity_tables("1604") + assert isinstance(res, list) assert len(res) > 0 diff --git a/tests/test_dictionary.py b/tests/test_dictionary.py index d74ed48..3215153 100644 --- a/tests/test_dictionary.py +++ b/tests/test_dictionary.py @@ -2,12 +2,12 @@ from tests.utils import make_client import io -class TestClass: +class TestClass: @classmethod def setup_class(cls): client = make_client() - setattr(cls, 'client', client) + cls.client = client @classmethod def teardown_class(cls): @@ -15,50 +15,56 @@ def teardown_class(cls): def test_datasource(self): client = self.client - res = DictionaryService(client).get_datasource('CNSIM') - assert res['name'] == 'CNSIM' + res = DictionaryService(client).get_datasource("CNSIM") + assert res["name"] == "CNSIM" def test_datasources(self): client = self.client res = DictionaryService(client).get_datasources() - assert type(res) == list - assert 'CNSIM' in [x['name'] for x in res] + assert isinstance(res, list) + assert "CNSIM" in [x["name"] for x in res] def test_table(self): client = self.client - res = DictionaryService(client).get_table('CNSIM', 'CNSIM1') - assert res['name'] == 'CNSIM1' - assert res['datasourceName'] == 'CNSIM' - assert res['link'] == '/datasource/CNSIM/table/CNSIM1' + res = DictionaryService(client).get_table("CNSIM", "CNSIM1") + assert res["name"] == "CNSIM1" + assert res["datasourceName"] == "CNSIM" + assert res["link"] == "/datasource/CNSIM/table/CNSIM1" def test_tables(self): client = self.client - res = DictionaryService(client).get_tables('CNSIM') - assert type(res) == list - assert 'CNSIM1' in [x['name'] for x in res] + res = DictionaryService(client).get_tables("CNSIM") + assert isinstance(res, list) + assert "CNSIM1" in [x["name"] for x in res] def test_variable(self): client = self.client - res = DictionaryService(client).get_variable('CNSIM', 'CNSIM1', 'GENDER') - assert res['name'] == 'GENDER' - assert res['parentLink']['link'] == '/datasource/CNSIM/table/CNSIM1' + res = DictionaryService(client).get_variable("CNSIM", "CNSIM1", "GENDER") + assert res["name"] == "GENDER" + assert res["parentLink"]["link"] == "/datasource/CNSIM/table/CNSIM1" def test_variables(self): client = self.client - res = DictionaryService(client).get_variables('CNSIM', 'CNSIM1') - assert type(res) == list + res = DictionaryService(client).get_variables("CNSIM", "CNSIM1") + assert isinstance(res, list) assert len(res) == 11 def test_variable_annotations(self): client = self.client output = io.StringIO() - ExportAnnotationsService(client).export_variable_annotations('CLSA', 'Tracking_60min_R1', 'WGHTS_PROV_TRM', output, taxonomies=['Mlstr_area']) - rows = output.getvalue().split('\r\n') - rows = [line.split('\t') for line in rows if len(line) > 0] + ExportAnnotationsService(client).export_variable_annotations( + "CLSA", + "Tracking_60min_R1", + "WGHTS_PROV_TRM", + output, + taxonomies=["Mlstr_area"], + ) + rows = output.getvalue().split("\r\n") + rows = [line.split("\t") for line in rows if len(line) > 0] assert len(rows) == 3 assert len(rows[0]) == 6 row = rows[2] - assert row[0] == 'CLSA' - assert row[1] == 'Tracking_60min_R1' - assert row[2] == 'WGHTS_PROV_TRM' - assert row[3] == 'Mlstr_area' + assert row[0] == "CLSA" + assert row[1] == "Tracking_60min_R1" + assert row[2] == "WGHTS_PROV_TRM" + assert row[3] == "Mlstr_area" diff --git a/tests/test_exports.py b/tests/test_exports.py index c9b9d8e..fd03a8e 100644 --- a/tests/test_exports.py +++ 
b/tests/test_exports.py @@ -2,12 +2,12 @@ from tests.utils import make_client import random -class TestClass: +class TestClass: @classmethod def setup_class(cls): client = make_client() - setattr(cls, 'client', client) + cls.client = client @classmethod def teardown_class(cls): @@ -17,9 +17,9 @@ def test_csv(self): client = self.client service = ExportCSVCommand(client) id = random.choice(list(range(1, 999, 1))) - output = '/tmp/test%s' % id - task = service.export_data('CNSIM', ['CNSIM1'], output) - assert task['command'] == 'copy' - assert 'id' in task - status = TaskService(client).wait_task(task['id']) - assert status in ['SUCCEEDED', 'CANCELED', 'FAILED'] \ No newline at end of file + output = f"/tmp/test{id}" + task = service.export_data("CNSIM", ["CNSIM1"], output) + assert task["command"] == "copy" + assert "id" in task + status = TaskService(client).wait_task(task["id"]) + assert status in ["SUCCEEDED", "CANCELED", "FAILED"] diff --git a/tests/test_file.py b/tests/test_file.py index 2ecc5ba..777d939 100644 --- a/tests/test_file.py +++ b/tests/test_file.py @@ -6,70 +6,70 @@ import shutil from uuid import uuid4 -class TestClass(unittest.TestCase): - TEST_FILE = '/tmp/data.csv' - TEST_ZIPPED_FILE = '/tmp/data.zip' - TEST_FILENAME = 'data.csv' - LOCAL_UPLOAD_FILE = '/tmp/data.csv' +class TestClass(unittest.TestCase): + TEST_FILE = "/tmp/data.csv" + TEST_ZIPPED_FILE = "/tmp/data.zip" + TEST_FILENAME = "data.csv" + LOCAL_UPLOAD_FILE = "/tmp/data.csv" - @classmethod - def setup_class(cls): - cls.service = FileService(make_client()) - suffix = uuid4().hex - cls.TEST_FILENAME = f'data_{suffix}.csv' - cls.TEST_FILE = f'/tmp/{cls.TEST_FILENAME}' - cls.TEST_ZIPPED_FILE = f'/tmp/data_{suffix}.zip' - cls.LOCAL_UPLOAD_FILE = f'/tmp/{cls.TEST_FILENAME}' + @classmethod + def setup_class(cls): + cls.service = FileService(make_client()) + suffix = uuid4().hex + cls.TEST_FILENAME = f"data_{suffix}.csv" + cls.TEST_FILE = f"/tmp/{cls.TEST_FILENAME}" + cls.TEST_ZIPPED_FILE = f"/tmp/data_{suffix}.zip" + cls.LOCAL_UPLOAD_FILE = f"/tmp/{cls.TEST_FILENAME}" - def test_1_fileUpload(self): - try: - shutil.copyfile('./tests/resources/data.csv', self.LOCAL_UPLOAD_FILE) - try: - self.service.upload_file(self.LOCAL_UPLOAD_FILE, '/tmp') - response = self.service.file_info(self.TEST_FILE) - if response['name'] == self.TEST_FILENAME: - assert True - else: - assert False - finally: - if os.path.exists(self.LOCAL_UPLOAD_FILE): - os.remove(self.LOCAL_UPLOAD_FILE) - except Exception as e: - assert False + def test_1_fileUpload(self): + try: + shutil.copyfile("./tests/resources/data.csv", self.LOCAL_UPLOAD_FILE) + try: + self.service.upload_file(self.LOCAL_UPLOAD_FILE, "/tmp") + response = self.service.file_info(self.TEST_FILE) + if response["name"] == self.TEST_FILENAME: + assert True + else: + assert False + finally: + if os.path.exists(self.LOCAL_UPLOAD_FILE): + os.remove(self.LOCAL_UPLOAD_FILE) + except Exception: + assert False - def test_2_fileDownload(self): - try: - outfile = open(self.TEST_FILE, "wb") - fd = outfile.fileno() - self.service.download_file(self.TEST_FILE, fd) - if os.path.exists(self.TEST_FILE): - os.remove(self.TEST_FILE) - assert True - else: - assert False - except Exception as e: - assert False + def test_2_fileDownload(self): + try: + with open(self.TEST_FILE, "wb") as outfile: + fd = outfile.fileno() + self.service.download_file(self.TEST_FILE, fd) + if os.path.exists(self.TEST_FILE): + os.remove(self.TEST_FILE) + assert True + else: + assert False + except Exception: + assert False - def 
test_3_fileDownloadWithPassword(self): - try: - outfile = open(self.TEST_ZIPPED_FILE, "wb") - fd = outfile.fileno() - self.service.download_file(self.TEST_FILE, fd, "12345678") - stat = os.stat(self.TEST_ZIPPED_FILE) - if stat.st_size > 0: - os.remove(self.TEST_ZIPPED_FILE) - assert True - else: - assert False - except Exception as e: - assert False + def test_3_fileDownloadWithPassword(self): + try: + with open(self.TEST_ZIPPED_FILE, "wb") as outfile: + fd = outfile.fileno() + self.service.download_file(self.TEST_FILE, fd, "12345678") + stat = os.stat(self.TEST_ZIPPED_FILE) + if stat.st_size > 0: + os.remove(self.TEST_ZIPPED_FILE) + assert True + else: + assert False + except Exception: + assert False - def test_4_deleteUpload(self): - try: - self.service.delete_file(self.TEST_FILE) - self.service.file_info(self.TEST_FILE) - except HTTPError as e: - assert e.code == 404 - except Exception as e: - assert False + def test_4_deleteUpload(self): + try: + self.service.delete_file(self.TEST_FILE) + self.service.file_info(self.TEST_FILE) + except HTTPError as e: + assert e.code == 404 + except Exception: + assert False diff --git a/tests/test_imports.py b/tests/test_imports.py index f91e6a4..33ced2f 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -4,12 +4,12 @@ import shutil import os -class TestClass: +class TestClass: @classmethod def setup_class(cls): client = make_client() - setattr(cls, 'client', client) + cls.client = client @classmethod def teardown_class(cls): @@ -19,21 +19,21 @@ def test_csv(self): client = self.client fs = FileService(client) id = random.choice(list(range(1, 999, 1))) - inname = 'data%s' % id - inpath = '/tmp/%s.csv' % inname - shutil.copy('./tests/resources/data.csv', inpath) - fs.upload_file(inpath, '/tmp') + inname = f"data{id}" + inpath = f"/tmp/{inname}.csv" + shutil.copy("./tests/resources/data.csv", inpath) + fs.upload_file(inpath, "/tmp") os.remove(inpath) assert fs.file_info(inpath) is not None service = ImportCSVCommand(client) - task = service.import_data(inpath, 'CNSIM') - assert 'id' in task - status = TaskService(client).wait_task(task['id']) - assert status in ['SUCCEEDED', 'CANCELED', 'FAILED'] + task = service.import_data(inpath, "CNSIM") + assert "id" in task + status = TaskService(client).wait_task(task["id"]) + assert status in ["SUCCEEDED", "CANCELED", "FAILED"] fs.delete_file(inpath) dico = DictionaryService(client) - table = dico.get_table('CNSIM', inname) + table = dico.get_table("CNSIM", inname) assert table is not None - dico.delete_tables('CNSIM', [inname]) - ds = dico.get_datasource('CNSIM') - assert inname not in ds['table'] \ No newline at end of file + dico.delete_tables("CNSIM", [inname]) + ds = dico.get_datasource("CNSIM") + assert inname not in ds["table"] diff --git a/tests/test_perm.py b/tests/test_perm.py index 2152095..abe73a4 100644 --- a/tests/test_perm.py +++ b/tests/test_perm.py @@ -2,28 +2,27 @@ from tests.utils import make_client import random -class TestClass: +class TestClass: @classmethod def setup_class(cls): client = make_client() - setattr(cls, 'client', client) + cls.client = client @classmethod def teardown_class(cls): cls.client.close() - + def test_table(self): client = self.client service = TablePermService(client) id = random.choice(list(range(1, 999, 1))) - name = 'pwel%s' % id - perms = service.get_perms('CNSIM', 'CNSIM1', 'user') + name = f"pwel{id}" + perms = service.get_perms("CNSIM", "CNSIM1", "user") assert len(perms) > 0 - service.add_perm('CNSIM', 'CNSIM1', name, 'user', 'view') - perms 
= service.get_perms('CNSIM', 'CNSIM1', 'user') - assert name in [x['subject']['principal'] for x in perms] - service.delete_perm('CNSIM', 'CNSIM1', name, 'user') - perms = service.get_perms('CNSIM', 'CNSIM1', 'user') - assert name not in [x['subject']['principal'] for x in perms] - \ No newline at end of file + service.add_perm("CNSIM", "CNSIM1", name, "user", "view") + perms = service.get_perms("CNSIM", "CNSIM1", "user") + assert name in [x["subject"]["principal"] for x in perms] + service.delete_perm("CNSIM", "CNSIM1", name, "user") + perms = service.get_perms("CNSIM", "CNSIM1", "user") + assert name not in [x["subject"]["principal"] for x in perms] diff --git a/tests/test_project.py b/tests/test_project.py index 8baa1d3..454dfb6 100644 --- a/tests/test_project.py +++ b/tests/test_project.py @@ -2,12 +2,12 @@ from tests.utils import make_client import random -class TestClass: +class TestClass: @classmethod def setup_class(cls): client = make_client() - setattr(cls, 'client', client) + cls.client = client @classmethod def teardown_class(cls): @@ -17,30 +17,32 @@ def test_project(self): client = self.client service = ProjectService(client) projects = service.get_projects() - assert type(projects) == list + assert isinstance(projects, list) assert len(projects) > 0 - assert 'CNSIM' in [x['name'] for x in projects] - project = service.get_project('CNSIM') - assert type(project) == dict - assert project['name'] == 'CNSIM' + assert "CNSIM" in [x["name"] for x in projects] + project = service.get_project("CNSIM") + assert isinstance(project, dict) + assert project["name"] == "CNSIM" def test_add_delete_project(self): client = self.client service = ProjectService(client) id = random.choice(list(range(1, 999, 1))) - name = 'test%s' % id + name = f"test{id}" service.add_project(name) project = service.get_project(name) - assert project['name'] == name + assert project["name"] == name service.delete_project(name) project = service.get_project(name) assert project is None def test_backup_command(self): client = self.client - res = BackupProjectCommand(client).backup_project('CNSIM', '/tmp/test', force=True) - assert res['command'] == 'backup' - assert res['name'] == 'backup' - assert res['project'] == 'CNSIM' - assert 'status' in res - assert 'id' in res + res = BackupProjectCommand(client).backup_project( + "CNSIM", "/tmp/test", force=True + ) + assert res["command"] == "backup" + assert res["name"] == "backup" + assert res["project"] == "CNSIM" + assert "status" in res + assert "id" in res diff --git a/tests/test_subjects.py b/tests/test_subjects.py index 0b97a64..6c199a0 100644 --- a/tests/test_subjects.py +++ b/tests/test_subjects.py @@ -2,12 +2,12 @@ from tests.utils import make_client import random -class TestClass: +class TestClass: @classmethod def setup_class(cls): client = make_client() - setattr(cls, 'client', client) + cls.client = client @classmethod def teardown_class(cls): @@ -17,25 +17,24 @@ def test_user_group(self): client = self.client service = UserService(client) id = random.choice(list(range(1, 999, 1))) - name = 'shadow%s' % id - grp = 'chats%s' % id - upassword = 'aA1aaa@a' - service.add_user(name, upassword, groups = [grp], disabled=True) + name = f"shadow{id}" + grp = f"chats{id}" + upassword = "aA1aaa@a" + service.add_user(name, upassword, groups=[grp], disabled=True) user = service.get_user(name) - assert user['name'] == name - assert user['enabled'] == False - assert len(user['groups']) == 1 - assert user['groups'][0] == grp - assert user['authenticationType'] == 
'PASSWORD' - + assert user["name"] == name + assert user["enabled"] == False + assert len(user["groups"]) == 1 + assert user["groups"][0] == grp + assert user["authenticationType"] == "PASSWORD" + gservice = GroupService(client) groups = gservice.get_groups() - assert grp in [x['name'] for x in groups] + assert grp in [x["name"] for x in groups] gservice.delete_group(grp) groups = gservice.get_groups() assert grp not in groups - + service.delete_user(name) user = service.get_user(name) assert user is None - \ No newline at end of file diff --git a/tests/test_taxonomy.py b/tests/test_taxonomy.py index 28bc9bd..80bedb2 100644 --- a/tests/test_taxonomy.py +++ b/tests/test_taxonomy.py @@ -2,78 +2,87 @@ from tests.utils import make_client from obiba_opal.system import TaxonomyService from obiba_opal.file import FileService -from obiba_opal.core import HTTPError -import shutil import os from uuid import uuid4 -class TestClass(unittest.TestCase): - - TEST_FILE = '/tmp/data.csv' - TEST_ZIPPED_FILE = '/tmp/data.zip' - TEST_TAXONOMY_FILENAME = 'OBiBa_taxonomyTest.yml' - TEST_TAXONOMY_FILE = '/tmp/OBiBa_taxonomyTest.yml' - LOCAL_TAXONOMY_FILE = '/tmp/OBiBa_taxonomyTest.yml' - TEST_TAXONOMY_NAME = 'OBiBa_taxonomyTest' - @classmethod - def setup_class(cls): - cls.service = TaxonomyService(make_client()) - suffix = uuid4().hex - cls.TEST_TAXONOMY_FILENAME = f'OBiBa_taxonomyTest_{suffix}.yml' - cls.TEST_TAXONOMY_FILE = f'/tmp/{cls.TEST_TAXONOMY_FILENAME}' - cls.LOCAL_TAXONOMY_FILE = f'/tmp/{cls.TEST_TAXONOMY_FILENAME}' - cls.TEST_TAXONOMY_NAME = f'OBiBa_taxonomyTest_{suffix}' - - def test_1_importFile(self): - try: - fileService = FileService(make_client()) - # Read and modify the taxonomy file to use randomized name - with open('./tests/resources/OBiBa_taxonomyTest.yml', 'r') as f: - content = f.read() - content = content.replace('"OBiBa_taxonomyTest"', f'"{self.TEST_TAXONOMY_NAME}"') - with open(self.LOCAL_TAXONOMY_FILE, 'w') as f: - f.write(content) - try: - fileService.upload_file(self.LOCAL_TAXONOMY_FILE, '/tmp') - response = fileService.file_info(self.TEST_TAXONOMY_FILE) - if response['name'] == self.TEST_TAXONOMY_FILENAME: - response = self.service.importFile(self.TEST_TAXONOMY_FILE, True) - fileService.delete_file(self.TEST_TAXONOMY_FILE) - assert response.code == 201 - else: - assert False - finally: - if os.path.exists(self.LOCAL_TAXONOMY_FILE): - os.remove(self.LOCAL_TAXONOMY_FILE) - except Exception as e: - assert False +class TestClass(unittest.TestCase): + TEST_FILE = "/tmp/data.csv" + TEST_ZIPPED_FILE = "/tmp/data.zip" + TEST_TAXONOMY_FILENAME = "OBiBa_taxonomyTest.yml" + TEST_TAXONOMY_FILE = "/tmp/OBiBa_taxonomyTest.yml" + LOCAL_TAXONOMY_FILE = "/tmp/OBiBa_taxonomyTest.yml" + TEST_TAXONOMY_NAME = "OBiBa_taxonomyTest" + @classmethod + def setup_class(cls): + cls.service = TaxonomyService(make_client()) + suffix = uuid4().hex + cls.TEST_TAXONOMY_FILENAME = f"OBiBa_taxonomyTest_{suffix}.yml" + cls.TEST_TAXONOMY_FILE = f"/tmp/{cls.TEST_TAXONOMY_FILENAME}" + cls.LOCAL_TAXONOMY_FILE = f"/tmp/{cls.TEST_TAXONOMY_FILENAME}" + cls.TEST_TAXONOMY_NAME = f"OBiBa_taxonomyTest_{suffix}" - def test_2_downloadTaxonomy(self): - try: - response = self.service.download(self.TEST_TAXONOMY_NAME) - assert response.code == 200 and self.TEST_TAXONOMY_NAME in str(response) + def test_1_importFile(self): + try: + fileService = FileService(make_client()) + # Read and modify the taxonomy file to use randomized name + with open("./tests/resources/OBiBa_taxonomyTest.yml") as f: + content = f.read() + content = 
content.replace( + '"OBiBa_taxonomyTest"', f'"{self.TEST_TAXONOMY_NAME}"' + ) + with open(self.LOCAL_TAXONOMY_FILE, "w") as f: + f.write(content) + try: + fileService.upload_file(self.LOCAL_TAXONOMY_FILE, "/tmp") + response = fileService.file_info(self.TEST_TAXONOMY_FILE) + if response["name"] == self.TEST_TAXONOMY_FILENAME: + response = self.service.importFile(self.TEST_TAXONOMY_FILE, True) + fileService.delete_file(self.TEST_TAXONOMY_FILE) + assert response.code == 201 + else: + assert False + finally: + if os.path.exists(self.LOCAL_TAXONOMY_FILE): + os.remove(self.LOCAL_TAXONOMY_FILE) + except Exception: + assert False - except Exception as e: - assert False + def test_2_downloadTaxonomy(self): + try: + response = self.service.download(self.TEST_TAXONOMY_NAME) + assert response.code == 200 and self.TEST_TAXONOMY_NAME in str(response) + except Exception: + assert False - def test_3_taxonomiesSummary(self): - try: - name = self.TEST_TAXONOMY_NAME - response = self.service.summaries() - assert response.code == 200 and len(list(filter(lambda t: t['name'] == name, response.from_json()['summaries']))) > 0 - except Exception as e: - assert False + def test_3_taxonomiesSummary(self): + try: + name = self.TEST_TAXONOMY_NAME + response = self.service.summaries() + assert ( + response.code == 200 + and len( + list( + filter( + lambda t: t["name"] == name, + response.from_json()["summaries"], + ) + ) + ) + > 0 + ) + except Exception: + assert False - def test_4_deleteTaxonomy(self): - try: - name = self.TEST_TAXONOMY_NAME - # keep around for interactive test - # response = self.service.confirmAndDelete(name, lambda: self.service.delete(name)) - response = self.service.delete(name) - assert response.code == 200 + def test_4_deleteTaxonomy(self): + try: + name = self.TEST_TAXONOMY_NAME + # keep around for interactive test + # response = self.service.confirmAndDelete(name, lambda: self.service.delete(name)) + response = self.service.delete(name) + assert response.code == 200 - except Exception as e: - assert False \ No newline at end of file + except Exception: + assert False diff --git a/tests/utils.py b/tests/utils.py index 1bac437..aa7cdd1 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,10 +1,12 @@ from obiba_opal import OpalClient -TEST_SERVER = 'https://opal-demo.obiba.org' +TEST_SERVER = "https://opal-demo.obiba.org" # TEST_SERVER = 'http://localhost:8080' -TEST_USER = 'administrator' -TEST_PASSWORD = 'password' +TEST_USER = "administrator" +TEST_PASSWORD = "password" def make_client(): - return OpalClient.buildWithAuthentication(server=TEST_SERVER, user=TEST_USER, password=TEST_PASSWORD) + return OpalClient.buildWithAuthentication( + server=TEST_SERVER, user=TEST_USER, password=TEST_PASSWORD + ) diff --git a/uv.lock b/uv.lock index 60f8436..55a808f 100644 --- a/uv.lock +++ b/uv.lock @@ -149,6 +149,9 @@ dependencies = [ ] [package.optional-dependencies] +dev = [ + { name = "ruff" }, +] test = [ { name = "pytest" }, ] @@ -157,9 +160,10 @@ test = [ requires-dist = [ { name = "pytest", marker = "extra == 'test'", specifier = ">=7.2.2" }, { name = "requests", specifier = ">=2.31.0" }, + { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.10.0" }, { name = "urllib3", specifier = ">=2.0" }, ] -provides-extras = ["test"] +provides-extras = ["test", "dev"] [[package]] name = "packaging" @@ -221,6 +225,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = 
"sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, ] +[[package]] +name = "ruff" +version = "0.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c8/39/5cee96809fbca590abea6b46c6d1c586b49663d1d2830a751cc8fc42c666/ruff-0.15.0.tar.gz", hash = "sha256:6bdea47cdbea30d40f8f8d7d69c0854ba7c15420ec75a26f463290949d7f7e9a", size = 4524893, upload-time = "2026-02-03T17:53:35.357Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/88/3fd1b0aa4b6330d6aaa63a285bc96c9f71970351579152d231ed90914586/ruff-0.15.0-py3-none-linux_armv6l.whl", hash = "sha256:aac4ebaa612a82b23d45964586f24ae9bc23ca101919f5590bdb368d74ad5455", size = 10354332, upload-time = "2026-02-03T17:52:54.892Z" }, + { url = "https://files.pythonhosted.org/packages/72/f6/62e173fbb7eb75cc29fe2576a1e20f0a46f671a2587b5f604bfb0eaf5f6f/ruff-0.15.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dcd4be7cc75cfbbca24a98d04d0b9b36a270d0833241f776b788d59f4142b14d", size = 10767189, upload-time = "2026-02-03T17:53:19.778Z" }, + { url = "https://files.pythonhosted.org/packages/99/e4/968ae17b676d1d2ff101d56dc69cf333e3a4c985e1ec23803df84fc7bf9e/ruff-0.15.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d747e3319b2bce179c7c1eaad3d884dc0a199b5f4d5187620530adf9105268ce", size = 10075384, upload-time = "2026-02-03T17:53:29.241Z" }, + { url = "https://files.pythonhosted.org/packages/a2/bf/9843c6044ab9e20af879c751487e61333ca79a2c8c3058b15722386b8cae/ruff-0.15.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:650bd9c56ae03102c51a5e4b554d74d825ff3abe4db22b90fd32d816c2e90621", size = 10481363, upload-time = "2026-02-03T17:52:43.332Z" }, + { url = "https://files.pythonhosted.org/packages/55/d9/4ada5ccf4cd1f532db1c8d44b6f664f2208d3d93acbeec18f82315e15193/ruff-0.15.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6664b7eac559e3048223a2da77769c2f92b43a6dfd4720cef42654299a599c9", size = 10187736, upload-time = "2026-02-03T17:53:00.522Z" }, + { url = "https://files.pythonhosted.org/packages/86/e2/f25eaecd446af7bb132af0a1d5b135a62971a41f5366ff41d06d25e77a91/ruff-0.15.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f811f97b0f092b35320d1556f3353bf238763420ade5d9e62ebd2b73f2ff179", size = 10968415, upload-time = "2026-02-03T17:53:15.705Z" }, + { url = "https://files.pythonhosted.org/packages/e7/dc/f06a8558d06333bf79b497d29a50c3a673d9251214e0d7ec78f90b30aa79/ruff-0.15.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:761ec0a66680fab6454236635a39abaf14198818c8cdf691e036f4bc0f406b2d", size = 11809643, upload-time = "2026-02-03T17:53:23.031Z" }, + { url = "https://files.pythonhosted.org/packages/dd/45/0ece8db2c474ad7df13af3a6d50f76e22a09d078af63078f005057ca59eb/ruff-0.15.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:940f11c2604d317e797b289f4f9f3fa5555ffe4fb574b55ed006c3d9b6f0eb78", size = 11234787, upload-time = "2026-02-03T17:52:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/8a/d9/0e3a81467a120fd265658d127db648e4d3acfe3e4f6f5d4ea79fac47e587/ruff-0.15.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcbca3d40558789126da91d7ef9a7c87772ee107033db7191edefa34e2c7f1b4", size = 11112797, upload-time = "2026-02-03T17:52:49.274Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/cb/8c0b3b0c692683f8ff31351dfb6241047fa873a4481a76df4335a8bff716/ruff-0.15.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9a121a96db1d75fa3eb39c4539e607f628920dd72ff1f7c5ee4f1b768ac62d6e", size = 11033133, upload-time = "2026-02-03T17:53:33.105Z" }, + { url = "https://files.pythonhosted.org/packages/f8/5e/23b87370cf0f9081a8c89a753e69a4e8778805b8802ccfe175cc410e50b9/ruff-0.15.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5298d518e493061f2eabd4abd067c7e4fb89e2f63291c94332e35631c07c3662", size = 10442646, upload-time = "2026-02-03T17:53:06.278Z" }, + { url = "https://files.pythonhosted.org/packages/e1/9a/3c94de5ce642830167e6d00b5c75aacd73e6347b4c7fc6828699b150a5ee/ruff-0.15.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:afb6e603d6375ff0d6b0cee563fa21ab570fd15e65c852cb24922cef25050cf1", size = 10195750, upload-time = "2026-02-03T17:53:26.084Z" }, + { url = "https://files.pythonhosted.org/packages/30/15/e396325080d600b436acc970848d69df9c13977942fb62bb8722d729bee8/ruff-0.15.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:77e515f6b15f828b94dc17d2b4ace334c9ddb7d9468c54b2f9ed2b9c1593ef16", size = 10676120, upload-time = "2026-02-03T17:53:09.363Z" }, + { url = "https://files.pythonhosted.org/packages/8d/c9/229a23d52a2983de1ad0fb0ee37d36e0257e6f28bfd6b498ee2c76361874/ruff-0.15.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6f6e80850a01eb13b3e42ee0ebdf6e4497151b48c35051aab51c101266d187a3", size = 11201636, upload-time = "2026-02-03T17:52:57.281Z" }, + { url = "https://files.pythonhosted.org/packages/6f/b0/69adf22f4e24f3677208adb715c578266842e6e6a3cc77483f48dd999ede/ruff-0.15.0-py3-none-win32.whl", hash = "sha256:238a717ef803e501b6d51e0bdd0d2c6e8513fe9eec14002445134d3907cd46c3", size = 10465945, upload-time = "2026-02-03T17:53:12.591Z" }, + { url = "https://files.pythonhosted.org/packages/51/ad/f813b6e2c97e9b4598be25e94a9147b9af7e60523b0cb5d94d307c15229d/ruff-0.15.0-py3-none-win_amd64.whl", hash = "sha256:dd5e4d3301dc01de614da3cdffc33d4b1b96fb89e45721f1598e5532ccf78b18", size = 11564657, upload-time = "2026-02-03T17:52:51.893Z" }, + { url = "https://files.pythonhosted.org/packages/f6/b0/2d823f6e77ebe560f4e397d078487e8d52c1516b331e3521bc75db4272ca/ruff-0.15.0-py3-none-win_arm64.whl", hash = "sha256:c480d632cc0ca3f0727acac8b7d053542d9e114a462a145d0b00e7cd658c515a", size = 10865753, upload-time = "2026-02-03T17:53:03.014Z" }, +] + [[package]] name = "tomli" version = "2.4.0" From 3baf6eb1219db740698135dd98a17e033728d22c Mon Sep 17 00:00:00 2001 From: ymarcon Date: Mon, 9 Feb 2026 17:21:41 +0100 Subject: [PATCH 3/9] Fix E501 line length issues in table.py --- obiba_opal/table.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/obiba_opal/table.py b/obiba_opal/table.py index e87dbf3..8f5db75 100644 --- a/obiba_opal/table.py +++ b/obiba_opal/table.py @@ -40,7 +40,8 @@ def add_arguments(cls, parser): "--name", "-na", required=False, - help="New table name (required if source and destination are the same, ignored if more than one table is to be copied)", + help="New table name (required if source and destination are the " + "same, ignored if more than one table is to be copied)", ) parser.add_argument( "--incremental", "-i", action="store_true", help="Incremental copy" @@ -91,7 +92,8 @@ def copy_tables( :param project: The project name :param tables: List of table names to be copied (default is all) :param destination: Destination project name - :param name: New table name (required if source 
and destination are the same, ignored if more than one table is to be copied) + :param name: New table name (required if source and destination are + the same, ignored if more than one table is to be copied) :param incremental: Incremental copy :param nulls: Copy the null values """ @@ -175,7 +177,8 @@ def do_command(cls, args): class BackupViewService: """ - Backup views of a project: download view's JSON representation and save it in a file, one for each view, for later restore. + Backup views of a project: download view's JSON representation and save + it in a file, one for each view, for later restore. """ def __init__(self, client: core.OpalClient, verbose: bool = False): @@ -213,7 +216,8 @@ def add_arguments(cls, parser): @classmethod def do_command(cls, args): """ - Retrieve table DTOs of the project, look for the views, download the views in JSON into a file in provided or current directory + Retrieve table DTOs of the project, look for the views, download the + views in JSON into a file in provided or current directory """ # Build and send request @@ -254,7 +258,8 @@ def backup_view(self, project: str, view, outdir, force: bool): def backup_views(self, project: str, views: list, output: str, force: bool) -> list: """ - Retrieve table DTOs of the project, look for the views, download the views in JSON into a file in provided or current directory + Retrieve table DTOs of the project, look for the views, download the + views in JSON into a file in provided or current directory :param client: Opal connection object :param project: The project name @@ -334,13 +339,15 @@ def add_arguments(cls, parser): "-vw", nargs="+", required=False, - help="List of view names to be restored (default is all the JSON files that are found in the backup directory/zip archive)", + help="List of view names to be restored (default is all the JSON " + "files that are found in the backup directory/zip archive)", ) parser.add_argument( "--input", "-in", required=False, - help="Input directory name or input zip file containing JSON views (default is current directory)", + help="Input directory name or input zip file containing JSON views " + "(default is current directory)", ) parser.add_argument( "--force", @@ -352,7 +359,8 @@ def add_arguments(cls, parser): @classmethod def do_command(cls, args): """ - Retrieve table DTOs of the project, look for the views, download the views in JSON into a file in provided or current directory + Retrieve table DTOs of the project, look for the views, download the + views in JSON into a file in provided or current directory """ # Build and send request From 135b4cb3fb1fd75798e34222528f9393fb9650ad Mon Sep 17 00:00:00 2001 From: ymarcon Date: Mon, 9 Feb 2026 17:24:13 +0100 Subject: [PATCH 4/9] Fix E501 line length issues in system.py --- obiba_opal/system.py | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/obiba_opal/system.py b/obiba_opal/system.py index e0d8ff1..fc64346 100644 --- a/obiba_opal/system.py +++ b/obiba_opal/system.py @@ -126,19 +126,24 @@ def add_arguments(cls, parser): "--install", "-i", required=False, - help="Install a plugin by providing its name or name:version or a path to a plugin archive file (in Opal file system). If no version is specified, the latest version is installed. Requires system restart to be effective.", + help="Install a plugin by providing its name or name:version or a " + "path to a plugin archive file (in Opal file system). If no " + "version is specified, the latest version is installed. 
" + "Requires system restart to be effective.", ) parser.add_argument( "--remove", "-rm", required=False, - help="Remove a plugin by providing its name. Requires system restart to be effective.", + help="Remove a plugin by providing its name. Requires system " + "restart to be effective.", ) parser.add_argument( "--reinstate", "-ri", required=False, - help="Reinstate a plugin that was previously removed by providing its name.", + help="Reinstate a plugin that was previously removed by providing " + "its name.", ) parser.add_argument( "--fetch", "-f", required=False, help="Get the named plugin description." @@ -147,7 +152,8 @@ def add_arguments(cls, parser): "--configure", "-c", required=False, - help="Configure the plugin site properties. Usually requires to restart the associated service to be effective.", + help="Configure the plugin site properties. Usually requires to " + "restart the associated service to be effective.", ) parser.add_argument( "--status", @@ -223,7 +229,8 @@ def do_command(cls, args): elif args.configure: request.content_type_text_plain() print( - "Enter plugin site properties (one property per line, Ctrl-D to end input):" + "Enter plugin site properties (one property per line, " + "Ctrl-D to end input):" ) request.content(sys.stdin.read()) response = ( @@ -427,7 +434,9 @@ def add_arguments(cls, parser): parser.add_argument( "--id", required=False, - help="The task ID. If not provided, it will be read from the standard input (from the JSON representation of the task or a plain value).", + help="The task ID. If not provided, it will be read from the " + "standard input (from the JSON representation of the task or " + "a plain value).", ) parser.add_argument( "--show", @@ -594,7 +603,8 @@ def add_arguments(cls, parser): """ parser.add_argument( "ws", - help="Web service path, for instance: /datasource/xxx/table/yyy/variable/vvv", + help="Web service path, for instance: /datasource/xxx/table/yyy/" + "variable/vvv", ) parser.add_argument( "--method", @@ -618,7 +628,8 @@ def add_arguments(cls, parser): "--headers", "-hs", required=False, - help='Custom headers in the form of: { "Key2": "Value2", "Key2": "Value2" }', + help='Custom headers in the form of: { "Key2": "Value2", ' + '"Key2": "Value2" }', ) parser.add_argument( "--json", From 6d4e8197de45a3947d1ae70aa05c8af521c7c400 Mon Sep 17 00:00:00 2001 From: ymarcon Date: Mon, 9 Feb 2026 17:56:00 +0100 Subject: [PATCH 5/9] Fix E501 line length issues --- .gitignore | 3 +- .ruff.toml | 5 +- obiba_opal/analysis.py | 24 +--- obiba_opal/console.py | 22 ++-- obiba_opal/core.py | 76 +++++-------- obiba_opal/data.py | 37 ++---- obiba_opal/dictionary.py | 69 ++++------- obiba_opal/exports.py | 95 ++++------------ obiba_opal/file.py | 20 +--- obiba_opal/imports.py | 240 ++++++++++++++++++--------------------- obiba_opal/io.py | 60 +++++----- obiba_opal/perm.py | 199 +++++++++----------------------- obiba_opal/project.py | 30 ++--- obiba_opal/security.py | 10 +- obiba_opal/sql.py | 31 +++-- obiba_opal/subjects.py | 17 ++- obiba_opal/system.py | 129 +++++---------------- obiba_opal/table.py | 85 ++++---------- tests/test_core.py | 15 +-- tests/test_project.py | 4 +- tests/test_taxonomy.py | 4 +- tests/utils.py | 4 +- 22 files changed, 387 insertions(+), 792 deletions(-) diff --git a/.gitignore b/.gitignore index 9db4e56..2727fde 100644 --- a/.gitignore +++ b/.gitignore @@ -32,4 +32,5 @@ coverage __pycache__ debug.py -.serena/ \ No newline at end of file +.serena/ +plans/ diff --git a/.ruff.toml b/.ruff.toml index e730c4d..30df81b 
100644 --- a/.ruff.toml +++ b/.ruff.toml @@ -19,8 +19,9 @@ extend-exclude = [ # Assume Python 3.10. target-version = "py310" -# Same as Black. -line-length = 88 +# Line length with preview to format +line-length = 120 +preview = true [lint] # Enable flake8-bugbear rules diff --git a/obiba_opal/analysis.py b/obiba_opal/analysis.py index 9b3f0a7..d028c9d 100644 --- a/obiba_opal/analysis.py +++ b/obiba_opal/analysis.py @@ -44,9 +44,7 @@ def do_command(self, args): """ client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - res = AnalysisCommand(client, args.verbose).analyse( - args.project, args.config - ) + res = AnalysisCommand(client, args.verbose).analyse(args.project, args.config) # format response core.Formatter.print_json(res, args.json) finally: @@ -138,9 +136,7 @@ def do_command(self, args): try: fd = sys.stdout.fileno() if args.table is None: - ExportAnalysisService(client, args.verbose).export_project_analyses( - args.project, fd, args.all_results - ) + ExportAnalysisService(client, args.verbose).export_project_analyses(args.project, fd, args.all_results) elif args.analysis_id is None: ExportAnalysisService(client, args.verbose).export_table_analyses( args.project, args.table, fd, args.all_results @@ -166,9 +162,7 @@ def export_project_analyses(self, project: str, fd, all_results: bool = True): request.get().resource(self._make_ws(project, all_results=all_results)).send(fp) fp.flush() - def export_table_analyses( - self, project: str, table: str, fd, all_results: bool = True - ): + def export_table_analyses(self, project: str, table: str, fd, all_results: bool = True): """ Export project's analyses for a specific table in a zip file. @@ -179,14 +173,10 @@ def export_table_analyses( request = self.client.new_request() request.fail_on_error().accept("application/zip") fp = os.fdopen(fd, "wb") - request.get().resource( - self._make_ws(project, table, all_results=all_results) - ).send() + request.get().resource(self._make_ws(project, table, all_results=all_results)).send() fp.flush() - def export_table_analysis( - self, project: str, table: str, analysis_id: str, fd, all_results: bool = True - ): + def export_table_analysis(self, project: str, table: str, analysis_id: str, fd, all_results: bool = True): """ Export project's analysis for a specific table and analyis in a zip file. @@ -198,9 +188,7 @@ def export_table_analysis( request = self.client.new_request() request.fail_on_error().accept("application/zip") fp = os.fdopen(fd, "wb") - request.get().resource( - self._make_ws(project, table, analysis_id, all_results) - ).send() + request.get().resource(self._make_ws(project, table, analysis_id, all_results)).send() fp.flush() def _make_ws( diff --git a/obiba_opal/console.py b/obiba_opal/console.py index 0dfab03..d2cf351 100755 --- a/obiba_opal/console.py +++ b/obiba_opal/console.py @@ -102,9 +102,7 @@ def add_opal_arguments(parser): nargs="?", help="Credentials auth: user password (requires a user name)", ) - parser.add_argument( - "--token", "-tk", required=False, help="Token auth: User access token" - ) + parser.add_argument("--token", "-tk", required=False, help="Token auth: User access token") parser.add_argument( "--ssl-cert", "-sc", @@ -145,8 +143,7 @@ def run(): parser = argparse.ArgumentParser(description="Opal command line tool.") subparsers = parser.add_subparsers( title="sub-commands", - help="Available sub-commands. Use --help option on the sub-command " - "for more details.", + help="Available sub-commands. 
Use --help option on the sub-command for more details.", ) # Add subcommands @@ -188,24 +185,21 @@ def run(): add_subcommand( subparsers, "taxonomy", - "Manage taxonomies: list available taxonomies, download, import or " - "delete a taxonomy.", + "Manage taxonomies: list available taxonomies, download, import or delete a taxonomy.", TaxonomyService.add_arguments, TaxonomyService.do_command, ) add_subcommand( subparsers, "backup-project", - "Backup project data: tables (data export), views, resources, report " - "templates, files.", + "Backup project data: tables (data export), views, resources, report templates, files.", BackupProjectCommand.add_arguments, BackupProjectCommand.do_command, ) add_subcommand( subparsers, "restore-project", - "Restore project data: tables (data import), views, resources, report " - "templates, files.", + "Restore project data: tables (data import), views, resources, report templates, files.", RestoreProjectCommand.add_arguments, RestoreProjectCommand.do_command, ) @@ -268,8 +262,7 @@ def run(): add_subcommand( subparsers, "import-r-rds", - "Import data from a RDS file (single serialized R object, expected to " - "be a tibble, using R).", + "Import data from a RDS file (single serialized R object, expected to be a tibble, using R).", ImportRDSCommand.add_arguments, ImportRDSCommand.do_command, ) @@ -318,8 +311,7 @@ def run(): add_subcommand( subparsers, "import-annot", - "Apply data dictionary annotations specified in a file in CSV/TSV " - "format (see export-annot).", + "Apply data dictionary annotations specified in a file in CSV/TSV format (see export-annot).", ImportAnnotationsService.add_arguments, ImportAnnotationsService.do_command, ) diff --git a/obiba_opal/core.py b/obiba_opal/core.py index 28fc805..e302cac 100755 --- a/obiba_opal/core.py +++ b/obiba_opal/core.py @@ -7,9 +7,7 @@ import json import os from requests import Session, Request, Response -import urllib.error import urllib.parse -import urllib.request import urllib3 from functools import reduce from http import HTTPStatus @@ -79,9 +77,7 @@ def buildWithCertificate(cls, server, cert, key, no_ssl_verify: bool = False): return client @classmethod - def buildWithAuthentication( - cls, server, user, password, no_ssl_verify: bool = False - ): + def buildWithAuthentication(cls, server, user, password, no_ssl_verify: bool = False): """ Creates a client instance authenticated by a user/password @@ -170,9 +166,7 @@ def init_otp(self): val = input("Enter 6-digits code: ") # validate code and get the opalsid cookie for further requests request = self.new_request() - request.header(otp_header, val).accept_json().get().resource( - profile_url - ).send() + request.header(otp_header, val).accept_json().get().resource(profile_url).send() def verify(self, value): """ @@ -239,8 +233,7 @@ def parse(cls, args): data["key"] = argv["ssl_key"] else: raise ValueError( - "Invalid login information. Requires user-password or token " - "or certificate-key information" + "Invalid login information. 
Requires user-password or token or certificate-key information" ) cls.data = data @@ -385,9 +378,7 @@ def content_upload(self, filename): """ if self._verbose: print("* File Content:") - print( - "[file=" + filename + ", size=" + str(os.path.getsize(filename)) + "]" - ) + print("[file=" + filename + ", size=" + str(os.path.getsize(filename)) + "]") self._upload_file = filename return self @@ -508,9 +499,7 @@ def get_location(self): def extract_cookie_value(self, name: str) -> str | None: if "set-cookie" in self.response.headers: if isinstance(self.response.headers["set-cookie"], str): - return self._extract_cookie_single_value( - name, self.response.headers["set-cookie"] - ) + return self._extract_cookie_single_value(name, self.response.headers["set-cookie"]) else: for header in self.response.headers["set-cookie"]: rval = self._extract_cookie_single_value(name, header) @@ -589,9 +578,13 @@ def get_ws(self): elif self.is_table(): return self.get_table_ws() elif self.is_variables(): - return UriBuilder( - ["datasource", self.datasource, "table", self.table, "variables"] - ).build() + return UriBuilder([ + "datasource", + self.datasource, + "table", + self.table, + "variables", + ]).build() else: return self.get_variable_ws() @@ -599,16 +592,14 @@ def get_table_ws(self): return UriBuilder(["datasource", self.datasource, "table", self.table]).build() def get_variable_ws(self): - return UriBuilder( - [ - "datasource", - self.datasource, - "table", - self.table, - "variable", - self.variable, - ] - ).build() + return UriBuilder([ + "datasource", + self.datasource, + "table", + self.table, + "variable", + self.variable, + ]).build() class UriBuilder: @@ -636,11 +627,9 @@ def query(self, key, value): val = f"{value}" if isinstance(value, bool): val = val.lower() - self._params.update( - [ - (key, val), - ] - ) + self._params.update([ + (key, val), + ]) return self def __str__(self): @@ -648,19 +637,14 @@ def concat_segment(p, s): return f"{p}/{s}" def concat_params(k): - return "{}={}".format( - urllib.parse.quote(k), - urllib.parse.quote(str(self._params[k])), - ) + return f"{urllib.parse.quote(k)}={urllib.parse.quote(str(self._params[k]))}" def concat_query(q, p): return f"{q}&{p}" p = urllib.parse.quote("/" + reduce(concat_segment, self._path)) if len(self._params): - q = reduce( - concat_query, list(map(concat_params, list(self._params.keys()))) - ) + q = reduce(concat_query, list(map(concat_params, list(self._params.keys())))) return f"{p}?{q}" else: return p @@ -675,14 +659,8 @@ def __init__(self, response: OpalResponse, message: str = None): super().__init__(message if message else f"HTTP Error: {response.code}") self.code = response.code http_status = [x for x in list(HTTPStatus) if x.value == response.code][0] - self.message = ( - message if message else f"{http_status.phrase}: {http_status.description}" - ) - self.error = ( - response.from_json() - if response.content - else {"code": response.code, "status": self.message} - ) + self.message = message if message else f"{http_status.phrase}: {http_status.description}" + self.error = response.from_json() if response.content else {"code": response.code, "status": self.message} # case the reported error is not a dict if not isinstance(self.error, dict): self.error = {"code": response.code, "status": self.error} diff --git a/obiba_opal/data.py b/obiba_opal/data.py index ca04d26..67f7d4c 100755 --- a/obiba_opal/data.py +++ b/obiba_opal/data.py @@ -36,15 +36,13 @@ def add_arguments(self, parser): "--raw", "-r", action="store_true", - help="Get 
raw value, output to stdout, useful for downloading " - "a binary value", + help="Get raw value, output to stdout, useful for downloading a binary value", ) parser.add_argument( "--pos", "-po", required=False, - help="Position of the value to query in case of a repeatable " - "variable (starting at 0).", + help="Position of the value to query in case of a repeatable variable (starting at 0).", ) parser.add_argument( "--json", @@ -64,9 +62,7 @@ def do_command(self, args): fd = None if args.raw: fd = sys.stdout.fileno() - res = DataService(client, args.verbose)._get_data( - args.name, args.id, args.pos, fd - ) + res = DataService(client, args.verbose)._get_data(args.name, args.id, args.pos, fd) # format response core.Formatter.print_json(res, args.json) finally: @@ -91,9 +87,7 @@ def get_valueset(self, project: str, table: str, id: str) -> dict: """ return self._get_data(f"{project}.{table}", id) - def get_value( - self, project: str, table: str, variable: str, id: str, pos: str = None, fd=None - ) -> dict: + def get_value(self, project: str, table: str, variable: str, id: str, pos: str = None, fd=None) -> dict: """ Get the variable value of an entity in a project's table. @@ -187,8 +181,7 @@ def add_arguments(self, parser): "--tables", "-ta", action="store_true", - help="Get the list of tables in which the entity with given " - "identifier exists.", + help="Get the list of tables in which the entity with given identifier exists.", ) parser.add_argument( "--json", @@ -207,9 +200,7 @@ def do_command(self, args): try: res = None if args.tables: - res = EntityService(client, args.verbose).get_entity_tables( - args.id, args.type - ) + res = EntityService(client, args.verbose).get_entity_tables(args.id, args.type) else: res = EntityService(client, args.verbose).get_entity(args.id, args.type) @@ -223,13 +214,7 @@ def get_entity(self, id: str, type: str = None) -> dict: if self.verbose: request.verbose() # send request - response = ( - request.fail_on_error() - .accept_json() - .get() - .resource(self._make_ws(id, type, False)) - .send() - ) + response = request.fail_on_error().accept_json().get().resource(self._make_ws(id, type, False)).send() return response.from_json() def get_entity_tables(self, id: str, type: str = None) -> list: @@ -237,13 +222,7 @@ def get_entity_tables(self, id: str, type: str = None) -> list: if self.verbose: request.verbose() # send request - response = ( - request.fail_on_error() - .accept_json() - .get() - .resource(self._make_ws(id, type, True)) - .send() - ) + response = request.fail_on_error().accept_json().get().resource(self._make_ws(id, type, True)).send() return response.from_json() def _make_ws(self, id: str, type: str = None, tables: bool = False): diff --git a/obiba_opal/dictionary.py b/obiba_opal/dictionary.py index 45e65fc..98f46a0 100755 --- a/obiba_opal/dictionary.py +++ b/obiba_opal/dictionary.py @@ -7,9 +7,7 @@ import csv import sys import pprint -import urllib.error import urllib.parse -import urllib.request class DictionaryService: @@ -183,8 +181,7 @@ def _get_dictionary_as_excel(self, name: str) -> any: if not resolver.is_variables(): raise Exception( - "Excel data dictionaries must be for all variables, use " - "'.
:*' format for resource." + "Excel data dictionaries must be for all variables, use '.
:*' format for resource." ) request.get().resource(f"{resolver.get_ws()}/excel") @@ -221,23 +218,19 @@ def add_arguments(cls, parser): type=argparse.FileType("w"), default=sys.stdout, ) - parser.add_argument( - "--locale", "-l", required=False, help="Exported locale (default is none)" - ) + parser.add_argument("--locale", "-l", required=False, help="Exported locale (default is none)") parser.add_argument( "--separator", "-s", required=False, - help="Separator char for CSV/TSV format (default is the " - "tabulation character)", + help="Separator char for CSV/TSV format (default is the tabulation character)", ) parser.add_argument( "--taxonomies", "-tx", nargs="+", required=False, - help="The list of taxonomy names of interest (default is any that " - "are found in the variable attributes)", + help="The list of taxonomy names of interest (default is any that are found in the variable attributes)", ) @classmethod @@ -287,9 +280,7 @@ def export_variable_annotations( taxonomies: list = None, locale: str = None, ): - self._export_annotations( - f"{project}.{table}:{variable}", output, sep, taxonomies, locale - ) + self._export_annotations(f"{project}.{table}:{variable}", output, sep, taxonomies, locale) def _export_annotations( self, @@ -303,9 +294,7 @@ def _export_annotations( writer.writerow(["project", "table", "variable", "namespace", "name", "value"]) self._handle_item(writer, name, taxonomies, locale) - def _handle_item( - self, writer, name: str, taxonomies: list = None, locale: str = None - ): + def _handle_item(self, writer, name: str, taxonomies: list = None, locale: str = None): # print 'Handling ' + name request = self.client.new_request() request.fail_on_error().accept_json() @@ -337,13 +326,9 @@ def _handle_item( locale, ) if resolver.is_variable(): - self._handle_variable( - writer, resolver.datasource, resolver.table, res, taxonomies, locale - ) + self._handle_variable(writer, resolver.datasource, resolver.table, res, taxonomies, locale) - def _handle_datasource( - self, writer, datasourceObject, taxonomies: list = None, locale: str = None - ): + def _handle_datasource(self, writer, datasourceObject, taxonomies: list = None, locale: str = None): for table in datasourceObject["table"]: self._handle_item( writer, @@ -352,9 +337,7 @@ def _handle_datasource( locale, ) - def _handle_table( - self, writer, tableObject, taxonomies: list = None, locale: str = None - ): + def _handle_table(self, writer, tableObject, taxonomies: list = None, locale: str = None): self._handle_item( writer, tableObject["datasourceName"] + "." 
+ tableObject["name"] + ":*", @@ -374,9 +357,7 @@ def _handle_variable( if "attributes" in variableObject: for attribute in variableObject["attributes"]: do_search = ( - "namespace" in attribute - and "locale" in attribute - and locale in attribute["locale"] + "namespace" in attribute and "locale" in attribute and locale in attribute["locale"] if locale else "namespace" in attribute and "locale" not in attribute ) @@ -410,8 +391,7 @@ def add_arguments(cls, parser): parser.add_argument( "--input", "-in", - help="CSV/TSV input file, typically the output of the " - '"export-annot" command (default is stdin)', + help='CSV/TSV input file, typically the output of the "export-annot" command (default is stdin)', type=argparse.FileType("r"), default=sys.stdin, ) @@ -425,15 +405,13 @@ def add_arguments(cls, parser): "--separator", "-s", required=False, - help="Separator char for CSV/TSV format (default is the " - "tabulation character)", + help="Separator char for CSV/TSV format (default is the tabulation character)", ) parser.add_argument( "--destination", "-d", required=False, - help="Destination datasource name (default is the one(s) " - "specified in the input file)", + help="Destination datasource name (default is the one(s) specified in the input file)", ) parser.add_argument( "--tables", @@ -448,8 +426,7 @@ def add_arguments(cls, parser): "-tx", nargs="+", required=False, - help="The list of taxonomy names of interest (default is any that " - "is found in the input file)", + help="The list of taxonomy names of interest (default is any that is found in the input file)", ) @classmethod @@ -494,16 +471,10 @@ def import_annotations( for name in value_map[datasource][table][namespace]: for value in value_map[datasource][table][namespace][name]: ds = destination if destination else datasource - variables = value_map[datasource][table][namespace][ - name - ][value] - self._annotate( - ds, table, namespace, name, value, variables, locale - ) - - def _annotate( - self, datasource, table, namespace, name, value, variables, locale: str = None - ): + variables = value_map[datasource][table][namespace][name][value] + self._annotate(ds, table, namespace, name, value, variables, locale) + + def _annotate(self, datasource, table, namespace, name, value, variables, locale: str = None): request = self.client.new_request() request.fail_on_error().accept_json() params = {"namespace": namespace, "name": name, "value": value} @@ -519,9 +490,7 @@ def _annotate( if self.verbose: request.verbose() - request.put().resource(builder.build()).content_type_form_urlencoded().content( - form - ).send() + request.put().resource(builder.build()).content_type_form_urlencoded().content(form).send() def _append_row(self, dictionary, row, tables=None, taxonomies=None): if row[0] not in dictionary: diff --git a/obiba_opal/exports.py b/obiba_opal/exports.py index 39f4b88..ce6a609 100644 --- a/obiba_opal/exports.py +++ b/obiba_opal/exports.py @@ -25,18 +25,14 @@ def add_arguments(cls, parser): required=True, help="The list of tables to be exported", ) - parser.add_argument( - "--name", "-n", required=True, help="Opal datasource plugin name" - ) + parser.add_argument("--name", "-n", required=True, help="Opal datasource plugin name") parser.add_argument( "--config", "-c", required=True, help="A JSON file containing the export configuration", ) - parser.add_argument( - "--identifiers", "-id", required=False, help="Name of the ID mapping" - ) + parser.add_argument("--identifiers", "-id", required=False, help="Name of the ID mapping") 
parser.add_argument( "--json", "-j", @@ -114,15 +110,9 @@ def add_arguments(cls, parser): required=True, help="The list of tables to be exported", ) - parser.add_argument( - "--output", "-out", required=True, help="Output directory name" - ) - parser.add_argument( - "--id-name", "-in", required=False, help="Name of the ID column name" - ) - parser.add_argument( - "--identifiers", "-id", required=False, help="Name of the ID mapping" - ) + parser.add_argument("--output", "-out", required=True, help="Output directory name") + parser.add_argument("--id-name", "-in", required=False, help="Name of the ID column name") + parser.add_argument("--identifiers", "-id", required=False, help="Name of the ID mapping") parser.add_argument( "--no-multilines", "-nl", @@ -212,15 +202,9 @@ def add_arguments(cls, parser): required=True, help="The list of tables to be exported", ) - parser.add_argument( - "--output", "-out", required=True, help="Output file name (.rds)" - ) - parser.add_argument( - "--id-name", "-in", required=False, help="Name of the ID column name" - ) - parser.add_argument( - "--identifiers", "-id", required=False, help="Name of the ID mapping" - ) + parser.add_argument("--output", "-out", required=True, help="Output file name (.rds)") + parser.add_argument("--id-name", "-in", required=False, help="Name of the ID column name") + parser.add_argument("--identifiers", "-id", required=False, help="Name of the ID mapping") parser.add_argument( "--no-multilines", "-nl", @@ -319,12 +303,8 @@ def add_arguments(cls, parser): required=True, help="Output file name (.sas7bdat or .xpt (Transport format))", ) - parser.add_argument( - "--id-name", "-in", required=False, help="Name of the ID column name" - ) - parser.add_argument( - "--identifiers", "-id", required=False, help="Name of the ID mapping" - ) + parser.add_argument("--id-name", "-in", required=False, help="Name of the ID column name") + parser.add_argument("--identifiers", "-id", required=False, help="Name of the ID mapping") parser.add_argument( "--no-multilines", "-nl", @@ -427,12 +407,8 @@ def add_arguments(cls, parser): required=True, help="Output file name (.sav or .zsav (compressed format))", ) - parser.add_argument( - "--id-name", "-in", required=False, help="Name of the ID column name" - ) - parser.add_argument( - "--identifiers", "-id", required=False, help="Name of the ID mapping" - ) + parser.add_argument("--id-name", "-in", required=False, help="Name of the ID column name") + parser.add_argument("--identifiers", "-id", required=False, help="Name of the ID mapping") parser.add_argument( "--no-multilines", "-nl", @@ -529,15 +505,9 @@ def add_arguments(cls, parser): required=True, help="The list of tables to be exported", ) - parser.add_argument( - "--output", "-out", required=True, help="Output file name (.dta)" - ) - parser.add_argument( - "--id-name", "-in", required=False, help="Name of the ID column name" - ) - parser.add_argument( - "--identifiers", "-id", required=False, help="Name of the ID mapping" - ) + parser.add_argument("--output", "-out", required=True, help="Output file name (.dta)") + parser.add_argument("--id-name", "-in", required=False, help="Name of the ID column name") + parser.add_argument("--identifiers", "-id", required=False, help="Name of the ID mapping") parser.add_argument( "--no-multilines", "-nl", @@ -630,12 +600,8 @@ def add_arguments(cls, parser): required=True, help="The list of tables to be exported", ) - parser.add_argument( - "--database", "-db", required=True, help="Name of the SQL database" - ) - 
parser.add_argument( - "--identifiers", "-id", required=False, help="Name of the ID mapping" - ) + parser.add_argument("--database", "-db", required=True, help="Name of the SQL database") + parser.add_argument("--identifiers", "-id", required=False, help="Name of the ID mapping") parser.add_argument( "--json", "-j", @@ -651,17 +617,13 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - res = cls(client, args.verbose).export_data( - args.datasource, args.tables, args.database, args.identifiers - ) + res = cls(client, args.verbose).export_data(args.datasource, args.tables, args.database, args.identifiers) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def export_data( - self, project: str, tables: list, database: str, identifiers: str = None - ): + def export_data(self, project: str, tables: list, database: str, identifiers: str = None): """ Export tables in a SQL database. @@ -711,9 +673,7 @@ def add_arguments(cls, parser): required=True, help="Output zip file name that will be exported", ) - parser.add_argument( - "--identifiers", "-id", required=False, help="Name of the ID mapping" - ) + parser.add_argument("--identifiers", "-id", required=False, help="Name of the ID mapping") parser.add_argument( "--json", "-j", @@ -729,17 +689,13 @@ def do_command(cls, args): # Check output filename extension client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - res = cls(client, args.verbose).export_data( - args.datasource, args.tables, args.output, args.identifiers - ) + res = cls(client, args.verbose).export_data(args.datasource, args.tables, args.output, args.identifiers) # format response core.Formatter.print_json(res, args.json) finally: client.close() - def export_data( - self, project: str, tables: list, output: str, identifiers: str = None - ) -> dict: + def export_data(self, project: str, tables: list, output: str, identifiers: str = None) -> dict: """ Export tables in an Opal archive file. @@ -786,9 +742,7 @@ def add_arguments(cls, parser): required=True, help="Project name from which genotypes data will be exported", ) - parser.add_argument( - "--vcf", "-vcf", nargs="+", required=True, help="List of VCF/BCF file names" - ) + parser.add_argument("--vcf", "-vcf", nargs="+", required=True, help="List of VCF/BCF file names") parser.add_argument( "--destination", "-d", @@ -807,8 +761,7 @@ def add_arguments(cls, parser): "--no-case-controls", "-nocc", action="store_true", - help="Do not include case control samples (only relevant if there " - "is a sample-participant mapping defined)", + help="Do not include case control samples (only relevant if there is a sample-participant mapping defined)", ) @classmethod diff --git a/obiba_opal/file.py b/obiba_opal/file.py index 9cdceac..807cc87 100755 --- a/obiba_opal/file.py +++ b/obiba_opal/file.py @@ -28,9 +28,7 @@ def add_arguments(self, parser): action="store_true", help="Download file, or folder (as a zip file).", ) - parser.add_argument( - "--download-password", "-dlp", help="Password to encrypt the file content." - ) + parser.add_argument("--download-password", "-dlp", help="Password to encrypt the file content.") parser.add_argument( "--upload", "-up", @@ -43,9 +41,7 @@ def add_arguments(self, parser): action="store_true", help="Delete a file on Opal file system.", ) - parser.add_argument( - "--force", "-f", action="store_true", help="Skip confirmation." 
- ) + parser.add_argument("--force", "-f", action="store_true", help="Skip confirmation.") parser.add_argument( "--json", "-j", @@ -65,9 +61,7 @@ def do_command(self, args): # send request if args.download or args.download_password: - service.download_file( - args.path, sys.stdout.fileno(), args.download_password - ) + service.download_file(args.path, sys.stdout.fileno(), args.download_password) else: if args.upload: service.upload_file(args.upload, args.path) @@ -76,9 +70,7 @@ def do_command(self, args): if args.force: service.delete_file(args.path) else: - confirmed = input( - 'Delete the file "' + args.path + '"? [y/N]: ' - ) + confirmed = input('Delete the file "' + args.path + '"? [y/N]: ') if confirmed == "y": service.delete_file(args.path) else: @@ -108,9 +100,7 @@ def download_file(self, path: str, fd, download_password: str = None): file = FileService.OpalFile(path) fp = os.fdopen(fd, "wb") - request.get().resource(file.get_ws()).accept("*/*").header( - "X-File-Key", download_password - ).send(fp) + request.get().resource(file.get_ws()).accept("*/*").header("X-File-Key", download_password).send(fp) fp.flush() def upload_file(self, upload: str, path: str): diff --git a/obiba_opal/imports.py b/obiba_opal/imports.py index 4ff9c39..71ca0f0 100644 --- a/obiba_opal/imports.py +++ b/obiba_opal/imports.py @@ -22,9 +22,7 @@ def add_arguments(cls, parser): """ Add import command specific options """ - parser.add_argument( - "--name", "-n", required=True, help="Opal datasource plugin name" - ) + parser.add_argument("--name", "-n", required=True, help="Opal datasource plugin name") parser.add_argument( "--config", "-c", @@ -146,18 +144,10 @@ def add_arguments(cls, parser): required=True, help="CSV file to import from the Opal filesystem.", ) - parser.add_argument( - "--characterSet", "-c", required=False, help="Character set." - ) - parser.add_argument( - "--separator", "-s", required=False, help="Field separator." - ) - parser.add_argument( - "--quote", "-q", required=False, help="Quotation mark character." - ) - parser.add_argument( - "--firstRow", "-f", type=int, required=False, help="From row." - ) + parser.add_argument("--characterSet", "-c", required=False, help="Character set.") + parser.add_argument("--separator", "-s", required=False, help="Field separator.") + parser.add_argument("--quote", "-q", required=False, help="Quotation mark character.") + parser.add_argument("--firstRow", "-f", type=int, required=False, help="From row.") parser.add_argument( "--valueType", "-vt", @@ -165,9 +155,7 @@ def add_arguments(cls, parser): help="Default value type (text, integer, decimal, boolean etc.). " 'When not specified, "text" is the default.', ) - parser.add_argument( - "--type", "-ty", required=True, help="Entity type (e.g. Participant)" - ) + parser.add_argument("--type", "-ty", required=True, help="Entity type (e.g. 
Participant)") # non specific import arguments io.add_import_arguments(parser) @@ -342,22 +330,19 @@ def add_arguments(cls, parser): "--url", "-ur", required=False, - help="LimeSurvey SQL database JDBC url (if not provided, plugin " - "defaults will be used).", + help="LimeSurvey SQL database JDBC url (if not provided, plugin defaults will be used).", ) parser.add_argument( "--uname", "-un", required=False, - help="LimeSurvey SQL database user name (if not provided, plugin " - "defaults will be used).", + help="LimeSurvey SQL database user name (if not provided, plugin defaults will be used).", ) parser.add_argument( "--pword", "-pwd", required=False, - help="LimeSurvey SQL database user password (if not provided, " - "plugin defaults will be used).", + help="LimeSurvey SQL database user password (if not provided, plugin defaults will be used).", ) parser.add_argument( "--prefix", @@ -420,18 +405,26 @@ def import_data( """ Import tables from a LimeSurvey database. - :param url: LimeSurvey SQL database JDBC url (if not provided, plugin defaults will be used) - :param uname: LimeSurvey SQL database user name (if not provided, plugin defaults will be used) - :param pword: LimeSurvey SQL database user password (if not provided, plugin defaults will be used) + :param url: LimeSurvey SQL database JDBC url (if not provided, plugin + defaults will be used) + :param uname: LimeSurvey SQL database user name (if not provided, plugin + defaults will be used) + :param pword: LimeSurvey SQL database user password (if not provided, + plugin defaults will be used) :param prefix: Table prefix (if not provided, plugin defaults will be used) - :param properties: SQL properties (if not provided, plugin defaults will be used) + :param properties: SQL properties (if not provided, plugin defaults will + be used) :param destination: The destination project :param tables: The tables names to be imported (default is all) :param incremental: Incremental import (new and updated value sets) :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping - :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) + :param policy: The ID mapping policy: "required" (each identifiers must + be mapped prior importation, default), "ignore" (ignore unknown + identifiers), "generate" (generate a system identifier for each + unknown identifier) + :param merge: Merge imported data dictionary with the destination one + (default is false, i.e. 
data dictionary is overridden) """ importer = io.OpalImporter.build( self.client, @@ -444,9 +437,7 @@ def import_data( merge=merge, verbose=self.verbose, ) - extension_factory = self.OpalExtensionFactory( - url, uname, pword, prefix, properties - ) + extension_factory = self.OpalExtensionFactory(url, uname, pword, prefix, properties) response = importer.submit(extension_factory) return response.from_json() @@ -496,9 +487,7 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument( - "--ropal", "-ro", required=True, help="Remote Opal server base url" - ) + parser.add_argument("--ropal", "-ro", required=True, help="Remote Opal server base url") parser.add_argument( "--ruser", "-ru", @@ -517,9 +506,7 @@ def add_arguments(cls, parser): required=False, help="Remote personal access token (exclusive from user credentials)", ) - parser.add_argument( - "--rdatasource", "-rd", required=True, help="Remote datasource name" - ) + parser.add_argument("--rdatasource", "-rd", required=True, help="Remote datasource name") # non specific import arguments io.add_import_arguments(parser) @@ -529,9 +516,7 @@ def do_command(cls, args): Execute import data command """ if (args.rtoken and args.ruser) or (not args.rtoken and not args.ruser): - raise ValueError( - "Either specify token OR user credentials (user name and password)" - ) + raise ValueError("Either specify token OR user credentials (user name and password)") # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: @@ -576,14 +561,19 @@ def import_data( :param rdatasource: Remote project's datasource name :param ruser: Remote user name (exclusive from using token) :param rpassword: Remote user password (exclusive from using token) - :param rtoken: Remote personal access token (exclusive from user credentials) + :param rtoken: Remote personal access token (exclusive from user + credentials) :param destination: The destination project :param tables: The tables names to be imported (default is all) :param incremental: Incremental import (new and updated value sets) :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping - :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) + :param policy: The ID mapping policy: "required" (each identifiers must + be mapped prior importation, default), "ignore" (ignore unknown + identifiers), "generate" (generate a system identifier for each + unknown identifier) + :param merge: Merge imported data dictionary with the destination one + (default is false, i.e. data dictionary is overridden) """ importer = io.OpalImporter.build( self.client, @@ -597,9 +587,7 @@ def import_data( verbose=self.verbose, ) # remote opal client factory - extension_factory = self.OpalExtensionFactory( - ropal, rdatasource, ruser, rpassword, rtoken - ) + extension_factory = self.OpalExtensionFactory(ropal, rdatasource, ruser, rpassword, rtoken) response = importer.submit(extension_factory) return response.from_json() @@ -645,14 +633,13 @@ def add_arguments(cls, parser): required=True, help="RDS file to import from the Opal filesystem.", ) - parser.add_argument( - "--type", "-ty", required=False, help="Entity type (e.g. 
Participant)" - ) + parser.add_argument("--type", "-ty", required=False, help="Entity type (e.g. Participant)") parser.add_argument( "--idVariable", "-iv", required=False, - help="R tibble column that provides the entity ID. If not specified, first column values are considered to be the entity identifiers.", + help="R tibble column that provides the entity ID. If not specified, first column values " + "are considered to be the entity identifiers.", ) # non specific import arguments @@ -705,14 +692,18 @@ def import_data( :param path: File to import in Opal file system :param entityType: Entity type (e.g. Participant) - :param idVariable: R tibble column that provides the entity ID. If not specified, first column values are considered to be the entity identifiers + :param idVariable: R tibble column that provides the entity ID. If not specified, first + column values are considered to be the entity identifiers :param destination: The destination project :param tables: The tables names to be imported (default is all) :param incremental: Incremental import (new and updated value sets) :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping - :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) + :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior + importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a + system identifier for each unknown identifier) + :param merge: Merge imported data dictionary with the destination one (default is false, + i.e. data dictionary is overridden) """ importer = io.OpalImporter.build( self.client, @@ -772,17 +763,14 @@ def add_arguments(cls, parser): required=True, help="SAS/SAS Transport file to import from the Opal filesystem.", ) - parser.add_argument( - "--locale", "-l", required=False, help="SAS file locale (e.g. fr, en...)." - ) - parser.add_argument( - "--type", "-ty", required=False, help="Entity type (e.g. Participant)" - ) + parser.add_argument("--locale", "-l", required=False, help="SAS file locale (e.g. fr, en...).") + parser.add_argument("--type", "-ty", required=False, help="Entity type (e.g. Participant)") parser.add_argument( "--idVariable", "-iv", required=False, - help="SAS variable that provides the entity ID. If not specified, first variable values are considered to be the entity identifiers.", + help="SAS variable that provides the entity ID. If not specified, first variable values are considered to " + "be the entity identifiers.", ) # non specific import arguments @@ -838,14 +826,18 @@ def import_data( :param path: File to import in Opal file system :param locale: SAS file locale (e.g. fr, en...) :param entityType: Entity type (e.g. Participant) - :param idVariable: R tibble column that provides the entity ID. If not specified, first column values are considered to be the entity identifiers + :param idVariable: R tibble column that provides the entity ID. 
If not specified, first column + values are considered to be the entity identifiers :param destination: The destination project :param tables: The tables names to be imported (default is all) :param incremental: Incremental import (new and updated value sets) :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping - :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) + :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior + importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system + identifier for each unknown identifier) + :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data + dictionary is overridden) """ importer = io.OpalImporter.build( self.client, @@ -858,9 +850,7 @@ def import_data( merge, self.verbose, ) - extension_factory = self.OpalExtensionFactory( - path, locale, entityType, idVariable - ) + extension_factory = self.OpalExtensionFactory(path, locale, entityType, idVariable) response = importer.submit(extension_factory) return response.from_json() @@ -911,17 +901,14 @@ def add_arguments(cls, parser): required=True, help="SPSS file, optionally compressed, to import from the Opal filesystem.", ) - parser.add_argument( - "--locale", "-l", required=False, help="SPSS file locale (e.g. fr, en...)." - ) - parser.add_argument( - "--type", "-ty", required=False, help="Entity type (e.g. Participant)" - ) + parser.add_argument("--locale", "-l", required=False, help="SPSS file locale (e.g. fr, en...).") + parser.add_argument("--type", "-ty", required=False, help="Entity type (e.g. Participant)") parser.add_argument( "--idVariable", "-iv", required=False, - help="SPSS variable that provides the entity ID. If not specified, first variable values are considered to be the entity identifiers.", + help="SPSS variable that provides the entity ID. If not specified, first variable values " + "are considered to be the entity identifiers.", ) # non specific import arguments @@ -977,14 +964,18 @@ def import_data( :param path: File to import in Opal file system :param locale: SPSS file locale (e.g. fr, en...) :param entityType: Entity type (e.g. Participant) - :param idVariable: R tibble column that provides the entity ID. If not specified, first column values are considered to be the entity identifiers + :param idVariable: R tibble column that provides the entity ID. If not specified, first column + values are considered to be the entity identifiers :param destination: The destination project :param tables: The tables names to be imported (default is all) :param incremental: Incremental import (new and updated value sets) :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping - :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. 
data dictionary is overridden) + :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior + importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a + system identifier for each unknown identifier) + :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data + dictionary is overridden) """ importer = io.OpalImporter.build( self.client, @@ -997,9 +988,7 @@ def import_data( merge, self.verbose, ) - extension_factory = self.OpalExtensionFactory( - path, locale, entityType, idVariable - ) + extension_factory = self.OpalExtensionFactory(path, locale, entityType, idVariable) response = importer.submit(extension_factory) return response.from_json() @@ -1050,17 +1039,14 @@ def add_arguments(cls, parser): required=True, help="Stata file to import from the Opal filesystem.", ) - parser.add_argument( - "--locale", "-l", required=False, help="Stata file locale (e.g. fr, en...)." - ) - parser.add_argument( - "--type", "-ty", required=False, help="Entity type (e.g. Participant)" - ) + parser.add_argument("--locale", "-l", required=False, help="Stata file locale (e.g. fr, en...).") + parser.add_argument("--type", "-ty", required=False, help="Entity type (e.g. Participant)") parser.add_argument( "--idVariable", "-iv", required=False, - help="Stata variable that provides the entity ID. If not specified, first variable values are considered to be the entity identifiers.", + help="Stata variable that provides the entity ID. If not specified, first variable " + "values are considered to be the entity identifiers.", ) # non specific import arguments @@ -1116,14 +1102,18 @@ def import_data( :param path: File to import in Opal file system :param locale: STATA file locale (e.g. fr, en...) :param entityType: Entity type (e.g. Participant) - :param idVariable: R tibble column that provides the entity ID. If not specified, first column values are considered to be the entity identifiers + :param idVariable: R tibble column that provides the entity ID. If not specified, first + column values are considered to be the entity identifiers :param destination: The destination project :param tables: The tables names to be imported (default is all) :param incremental: Incremental import (new and updated value sets) :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping - :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) + :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior + importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a + system identifier for each unknown identifier) + :param merge: Merge imported data dictionary with the destination one (default is false, i.e. 
+ data dictionary is overridden) """ importer = io.OpalImporter.build( self.client, @@ -1136,9 +1126,7 @@ def import_data( merge, self.verbose, ) - extension_factory = self.OpalExtensionFactory( - path, locale, entityType, idVariable - ) + extension_factory = self.OpalExtensionFactory(path, locale, entityType, idVariable) response = importer.submit(extension_factory) return response.from_json() @@ -1183,9 +1171,7 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument( - "--database", "-db", required=True, help="Name of the SQL database." - ) + parser.add_argument("--database", "-db", required=True, help="Name of the SQL database.") # non specific import arguments io.add_import_arguments(parser) @@ -1226,14 +1212,18 @@ def import_data( """ Import tables from a SQL database. - :param database: The database name as declared in Opal. See ProjectService.get_databases() for a list of databases with 'import' usage. + :param database: The database name as declared in Opal. See ProjectService.get_databases() + for a list of databases with 'import' usage. :param destination: The destination project :param tables: The tables names to be imported (default is all) :param incremental: Incremental import (new and updated value sets) :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping - :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) + :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior + importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a + system identifier for each unknown identifier) + :param merge: Merge imported data dictionary with the destination one (default is false, i.e. + data dictionary is overridden) """ importer = io.OpalImporter.build( self.client, @@ -1259,9 +1249,7 @@ def add(self, factory): """ Add specific datasource factory extension """ - factory["Magma.JdbcDatasourceFactoryDto.params"] = { - "database": self.database - } + factory["Magma.JdbcDatasourceFactoryDto.params"] = {"database": self.database} class ImportXMLCommand: @@ -1330,8 +1318,11 @@ def import_data( :param incremental: Incremental import (new and updated value sets) :param limit: Import limit (maximum number of value sets) :param identifiers: The name of the ID mapping - :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier) - :param merge: Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden) + :param policy: The ID mapping policy: "required" (each identifiers must be mapped prior + importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a + system identifier for each unknown identifier) + :param merge: Merge imported data dictionary with the destination one (default is false, i.e. 
+ data dictionary is overridden) """ importer = io.OpalImporter.build( self.client, @@ -1399,9 +1390,12 @@ def do_command(cls, args): options = {"project": args.project, "files": args.vcf} # send request - uri = core.UriBuilder( - ["project", args.project, "commands", "_import_vcf"] - ).build() + uri = core.UriBuilder([ + "project", + args.project, + "commands", + "_import_vcf", + ]).build() request.resource(uri).post().content(json.dumps(options)).send() finally: client.close() @@ -1425,9 +1419,7 @@ def add_arguments(cls, parser): """ Add import_ids command specific options """ - parser.add_argument( - "--type", "-t", required=True, help="Entity type (e.g. Participant)." - ) + parser.add_argument("--type", "-t", required=True, help="Entity type (e.g. Participant).") @classmethod def do_command(cls, args): @@ -1458,11 +1450,7 @@ def import_ids(self, ids: list, type: str): request.content("\n".join(ids)) # send request - uri = ( - core.UriBuilder(["identifiers", "mappings", "entities", "_import"]) - .query("type", type) - .build() - ) + uri = core.UriBuilder(["identifiers", "mappings", "entities", "_import"]).query("type", type).build() request.post().resource(uri).send() @@ -1484,13 +1472,9 @@ def add_arguments(cls, parser): """ Add import_idsmap command specific options """ - parser.add_argument( - "--type", "-t", required=True, help="Entity type (e.g. Participant)." - ) + parser.add_argument("--type", "-t", required=True, help="Entity type (e.g. Participant).") parser.add_argument("--map", "-m", required=True, help="Mapping name.") - parser.add_argument( - "--separator", "-s", required=False, help="Field separator (default is ,)." - ) + parser.add_argument("--separator", "-s", required=False, help="Field separator (default is ,).") @classmethod def do_command(cls, args): @@ -1500,13 +1484,9 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - print( - "Enter identifiers (one identifiers mapping per line, Ctrl-D to end input):" - ) + print("Enter identifiers (one identifiers mapping per line, Ctrl-D to end input):") ids = sys.stdin.read() - cls(client, args.verbose).import_ids( - ids.split("\n"), args.type, args.map, args.separator - ) + cls(client, args.verbose).import_ids(ids.split("\n"), args.type, args.map, args.separator) finally: client.close() @@ -1529,9 +1509,7 @@ def import_ids(self, ids: list, type: str, map: str, separator: str = ","): request.content("\n".join(ids)) # send request - builder = core.UriBuilder(["identifiers", "mapping", map, "_import"]).query( - "type", type - ) + builder = core.UriBuilder(["identifiers", "mapping", map, "_import"]).query("type", type) if separator: builder.query("separator", separator) uri = builder.build() diff --git a/obiba_opal/io.py b/obiba_opal/io.py index 6f71432..55d464f 100644 --- a/obiba_opal/io.py +++ b/obiba_opal/io.py @@ -11,9 +11,7 @@ def add_import_arguments(parser): """ Add Default Import arguments """ - parser.add_argument( - "--destination", "-d", required=True, help="Destination datasource name" - ) + parser.add_argument("--destination", "-d", required=True, help="Destination datasource name") parser.add_argument( "--tables", "-t", @@ -34,20 +32,20 @@ def add_import_arguments(parser): type=int, help="Import limit (maximum number of value sets)", ) - parser.add_argument( - "--identifiers", "-id", required=False, help="Name of the ID mapping" - ) + parser.add_argument("--identifiers", "-id", required=False, help="Name of the ID mapping") 
parser.add_argument( "--policy", "-po", required=False, - help='ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" (ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier)', + help='ID mapping policy: "required" (each identifiers must be mapped prior importation, default), "ignore" ' + '(ignore unknown identifiers), "generate" (generate a system identifier for each unknown identifier)', ) parser.add_argument( "--merge", "-mg", action="store_true", - help="Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is overridden).", + help="Merge imported data dictionary with the destination one (default is false, i.e. data dictionary is " + "overridden).", ) parser.add_argument( "--json", @@ -64,9 +62,7 @@ class OpalImporter: class ExtensionFactoryInterface: def add(self, factory): - raise Exception( - "ExtensionFactoryInterface.add() method must be implemented by a concrete class." - ) + raise Exception("ExtensionFactoryInterface.add() method must be implemented by a concrete class.") @classmethod def build( @@ -142,9 +138,12 @@ def table_fullname(t): print(options) print("**") - uri = core.UriBuilder( - ["project", self.destination, "commands", "_import"] - ).build() + uri = core.UriBuilder([ + "project", + self.destination, + "commands", + "_import", + ]).build() response = request.post().resource(uri).content(json.dumps(options)).send() # get job status @@ -207,11 +206,7 @@ def __create_transient_datasource( mergeStr = "false" if self.merge: mergeStr = "true" - uri = ( - core.UriBuilder(["project", self.destination, "transient-datasources"]) - .query("merge", mergeStr) - .build() - ) + uri = core.UriBuilder(["project", self.destination, "transient-datasources"]).query("merge", mergeStr).build() response = request.post().resource(uri).content(json.dumps(factory)).send() transient = json.loads(response.content) @@ -223,14 +218,12 @@ def __create_transient_datasource( def compare_datasource(self, transient): # Compare datasources : /datasource//compare/ - uri = core.UriBuilder( - [ - "datasource", - transient["name"].encode("ascii", "ignore"), - "compare", - self.destination, - ] - ).build() + uri = core.UriBuilder([ + "datasource", + transient["name"].encode("ascii", "ignore"), + "compare", + self.destination, + ]).build() request = self.client.new_request() request.fail_on_error().accept_json().content_type_json() if self.verbose: @@ -241,9 +234,7 @@ def compare_datasource(self, transient): if i["conflicts"]: all_conflicts = [] for c in i["conflicts"]: - all_conflicts.append( - c["code"] + "(" + ", ".join(c["arguments"]) + ")" - ) + all_conflicts.append(c["code"] + "(" + ", ".join(c["arguments"]) + ")") raise Exception("Import conflicts: " + "; ".join(all_conflicts)) @@ -318,9 +309,12 @@ def table_fullname(t): if self.verbose: request.verbose() - uri = core.UriBuilder( - ["project", self.datasource, "commands", "_export"] - ).build() + uri = core.UriBuilder([ + "project", + self.datasource, + "commands", + "_export", + ]).build() response = request.post().resource(uri).content(json.dumps(options)).send() # get job status diff --git a/obiba_opal/perm.py b/obiba_opal/perm.py index cfb6173..397b54b 100644 --- a/obiba_opal/perm.py +++ b/obiba_opal/perm.py @@ -28,9 +28,7 @@ def _add_permission_arguments(self, parser, permissions: list): required=False, help="Fetch permissions", ) - parser.add_argument( - "--add", "-a", action="store_true", required=False, help="Add 
a permission" - ) + parser.add_argument("--add", "-a", action="store_true", required=False, help="Add a permission") parser.add_argument( "--delete", "-d", @@ -49,9 +47,7 @@ def _add_permission_arguments(self, parser, permissions: list): required=False, help="Subject name to which the permission will be granted/removed (required on add/delete)", ) - parser.add_argument( - "--type", "-ty", required=True, help="Subject type: user or group" - ) + parser.add_argument("--type", "-ty", required=True, help="Subject type: user or group") parser.add_argument( "--json", "-j", @@ -78,28 +74,21 @@ def _validate_args(self, args, permissions): if not args.subject: raise ValueError("The subject name is required") if not args.permission: - raise ValueError( - f"A permission name is required: {', '.join(list(permissions.keys()))}" - ) + raise ValueError(f"A permission name is required: {', '.join(list(permissions.keys()))}") if self._map_permission(args.permission, permissions) is None: - raise ValueError( - f"Valid permissions are: {', '.join(list(permissions.keys()))}" - ) + raise ValueError(f"Valid permissions are: {', '.join(list(permissions.keys()))}") if args.delete: if not args.subject: raise ValueError("The subject name is required") if not args.type or args.type.upper() not in self.SUBJECT_TYPES: - raise ValueError( - f"Valid subject types are: {', '.join(self.SUBJECT_TYPES).lower()}" - ) + raise ValueError(f"Valid subject types are: {', '.join(self.SUBJECT_TYPES).lower()}") - def _make_add_ws( - self, path: list, subject: str, type: str, permission: str, permissions: dict - ): + def _make_add_ws(self, path: list, subject: str, type: str, permission: str, permissions: dict): return ( - core.UriBuilder(path) + core + .UriBuilder(path) .query("type", type.upper()) .query("permission", self._map_permission(permission, permissions)) .query("principal", subject) @@ -107,12 +96,7 @@ def _make_add_ws( ) def _make_delete_ws(self, path: list, subject: str, type: str = "user"): - return ( - core.UriBuilder(path) - .query("type", type.upper()) - .query("principal", subject) - .build() - ) + return core.UriBuilder(path).query("type", type.upper()).query("principal", subject).build() def _make_get_ws(self, path: list, type: str = "user"): return core.UriBuilder(path).query("type", type.upper()).build() @@ -175,11 +159,7 @@ def get_perms(self, project: str, type: str) -> list: """ request = self._make_request() response = ( - request.get() - .resource( - self._make_get_ws(["project", project, "permissions", "project"], type) - ) - .send() + request.get().resource(self._make_get_ws(["project", project, "permissions", "project"], type)).send() ) return response.from_json() @@ -193,9 +173,7 @@ def delete_perm(self, project: str, subject: str, type: str): """ request = self._make_request() request.delete().resource( - self._make_delete_ws( - ["project", project, "permissions", "project"], subject, type - ) + self._make_delete_ws(["project", project, "permissions", "project"], subject, type) ).send() def add_perm(self, project: str, subject: str, type: str, permission: str): @@ -277,13 +255,7 @@ def get_perms(self, project: str, type: str) -> list: """ request = self._make_request() response = ( - request.get() - .resource( - self._make_get_ws( - ["project", project, "permissions", "datasource"], type - ) - ) - .send() + request.get().resource(self._make_get_ws(["project", project, "permissions", "datasource"], type)).send() ) return response.from_json() @@ -297,9 +269,7 @@ def delete_perm(self, project: str, 
subject: str, type: str): """ request = self._make_request() request.delete().resource( - self._make_delete_ws( - ["project", project, "permissions", "datasource"], subject, type - ) + self._make_delete_ws(["project", project, "permissions", "datasource"], subject, type) ).send() def add_perm(self, project: str, subject: str, type: str, permission: str): @@ -375,9 +345,7 @@ def do_command(cls, args): if args.delete: service.delete_perms(args.project, args.tables, args.subject, args.type) elif args.add: - service.add_perms( - args.project, args.tables, args.subject, args.type, args.permission - ) + service.add_perms(args.project, args.tables, args.subject, args.type, args.permission) else: res = [] for table in service._ensure_tables(args.project, args.tables): @@ -395,13 +363,7 @@ def get_perms(self, project: str, table: str, type: str) -> list: """ request = self._make_request() response = ( - request.get() - .resource( - self._make_get_ws( - ["project", project, "permissions", "table", table], type - ) - ) - .send() + request.get().resource(self._make_get_ws(["project", project, "permissions", "table", table], type)).send() ) return response.from_json() @@ -429,14 +391,10 @@ def delete_perm(self, project: str, table: str, subject: str, type: str): """ request = self._make_request() request.delete().resource( - self._make_delete_ws( - ["project", project, "permissions", "table", table], subject, type - ) + self._make_delete_ws(["project", project, "permissions", "table", table], subject, type) ).send() - def add_perms( - self, project: str, tables: list, subject: str, type: str, permission: str - ): + def add_perms(self, project: str, tables: list, subject: str, type: str, permission: str): """ Add project's tables level permissions. @@ -450,9 +408,7 @@ def add_perms( for table in tables_: self.add_perm(project, table, subject, type, permission) - def add_perm( - self, project: str, table: str, subject: str, type: str, permission: str - ): + def add_perm(self, project: str, table: str, subject: str, type: str, permission: str): """ Add project's table level permissions. 
@@ -479,12 +435,7 @@ def _ensure_tables(self, project: str, tables: list) -> list: """ if not tables: request = self._make_request() - res = ( - request.get() - .resource(core.UriBuilder(["datasource", project, "tables"]).build()) - .send() - .from_json() - ) + res = request.get().resource(core.UriBuilder(["datasource", project, "tables"]).build()).send().from_json() return [x["name"] for x in res] else: return tables @@ -540,9 +491,7 @@ def do_command(cls, args): # send request if args.delete: - service.delete_perms( - args.project, args.table, args.variables, args.subject, args.type - ) + service.delete_perms(args.project, args.table, args.variables, args.subject, args.type) elif args.add: service.add_perms( args.project, @@ -554,12 +503,8 @@ def do_command(cls, args): ) else: res = [] - for variable in service._ensure_variables( - args.project, args.table, args.variables - ): - res = res + service.get_perms( - args.project, args.table, variable, args.type - ) + for variable in service._ensure_variables(args.project, args.table, args.variables): + res = res + service.get_perms(args.project, args.table, variable, args.type) core.Formatter.print_json(res, args.json) finally: client.close() @@ -575,7 +520,8 @@ def get_perms(self, project: str, table: str, variable: str, type: str) -> list: """ request = self._make_request() response = ( - request.get() + request + .get() .resource( self._make_get_ws( [ @@ -594,9 +540,7 @@ def get_perms(self, project: str, table: str, variable: str, type: str) -> list: ) return response.from_json() - def delete_perms( - self, project: str, table: str, variables: list, subject: str, type: str - ): + def delete_perms(self, project: str, table: str, variables: list, subject: str, type: str): """ Delete project's table variables level permissions. @@ -610,9 +554,7 @@ def delete_perms( for variable in variables_: self.delete_perm(project, table, variable, subject, type) - def delete_perm( - self, project: str, table: str, variable: str, subject: str, type: str - ): + def delete_perm(self, project: str, table: str, variable: str, subject: str, type: str): """ Delete project's table variable level permissions. 
@@ -707,11 +649,16 @@ def _ensure_variables(self, project: str, table: str, variables: list) -> list: if not variables: request = self._make_request() res = ( - request.get() + request + .get() .resource( - core.UriBuilder( - ["datasource", project, "table", table, "variables"] - ).build() + core.UriBuilder([ + "datasource", + project, + "table", + table, + "variables", + ]).build() ) .send() .from_json() @@ -765,9 +712,7 @@ def do_command(cls, args): # send request if args.delete: - service.delete_perms( - args.project, args.resources, args.subject, args.type - ) + service.delete_perms(args.project, args.resources, args.subject, args.type) elif args.add: service.add_perms( args.project, @@ -793,12 +738,9 @@ def get_perms(self, project: str, resource: str, type: str) -> list: """ request = self._make_request() response = ( - request.get() - .resource( - self._make_get_ws( - ["project", project, "permissions", "resource", resource], type - ) - ) + request + .get() + .resource(self._make_get_ws(["project", project, "permissions", "resource", resource], type)) .send() ) return response.from_json() @@ -827,14 +769,10 @@ def delete_perm(self, project: str, resource: str, subject: str, type: str): """ request = self._make_request() request.delete().resource( - self._make_delete_ws( - ["project", project, "permissions", "resource", resource], subject, type - ) + self._make_delete_ws(["project", project, "permissions", "resource", resource], subject, type) ).send() - def add_perms( - self, project: str, resources: list, subject: str, type: str, permission: str - ): + def add_perms(self, project: str, resources: list, subject: str, type: str, permission: str): """ Add project's resources level permissions. @@ -848,9 +786,7 @@ def add_perms( for resource in resources_: self.add_perm(project, resource, subject, type, permission) - def add_perm( - self, project: str, resource: str, subject: str, type: str, permission: str - ): + def add_perm(self, project: str, resource: str, subject: str, type: str, permission: str): """ Add project's resource level permissions. 
@@ -877,12 +813,7 @@ def _ensure_resources(self, project: str, resources: list) -> list: """ if not resources: request = self._make_request() - res = ( - request.get() - .resource(core.UriBuilder(["project", project, "resources"]).build()) - .send() - .from_json() - ) + res = request.get().resource(core.UriBuilder(["project", project, "resources"]).build()).send().from_json() return [x["name"] for x in res] else: return resources @@ -942,13 +873,7 @@ def get_perms(self, project: str, type: str) -> list: """ request = self._make_request() response = ( - request.get() - .resource( - self._make_get_ws( - ["project", project, "permissions", "resources"], type - ) - ) - .send() + request.get().resource(self._make_get_ws(["project", project, "permissions", "resources"], type)).send() ) return response.from_json() @@ -962,9 +887,7 @@ def delete_perm(self, project: str, subject: str, type: str): """ request = self._make_request() request.delete().resource( - self._make_delete_ws( - ["project", project, "permissions", "resources"], subject, type - ) + self._make_delete_ws(["project", project, "permissions", "resources"], subject, type) ).send() def add_perm(self, project: str, subject: str, type: str, permission: str): @@ -1034,11 +957,7 @@ def get_perms(self, type: str) -> list: :param type: The subject type ('user' or 'group') """ request = self._make_request() - response = ( - request.get() - .resource(self._make_get_ws(["system", "permissions", "r"], type)) - .send() - ) + response = request.get().resource(self._make_get_ws(["system", "permissions", "r"], type)).send() return response.from_json() def delete_perm(self, subject: str, type: str): @@ -1049,9 +968,7 @@ def delete_perm(self, subject: str, type: str): :param type: The subject type ('user' or 'group') """ request = self._make_request() - request.delete().resource( - self._make_delete_ws(["system", "permissions", "r"], subject, type) - ).send() + request.delete().resource(self._make_delete_ws(["system", "permissions", "r"], subject, type)).send() def add_perm(self, subject: str, type: str, permission: str): """ @@ -1119,11 +1036,7 @@ def get_perms(self, type: str) -> list: :param type: The subject type ('user' or 'group') """ request = self._make_request() - response = ( - request.get() - .resource(self._make_get_ws(["system", "permissions", "datashield"], type)) - .send() - ) + response = request.get().resource(self._make_get_ws(["system", "permissions", "datashield"], type)).send() return response.from_json() def delete_perm(self, subject: str, type: str): @@ -1134,9 +1047,7 @@ def delete_perm(self, subject: str, type: str): :param type: The subject type ('user' or 'group') """ request = self._make_request() - request.delete().resource( - self._make_delete_ws(["system", "permissions", "datashield"], subject, type) - ).send() + request.delete().resource(self._make_delete_ws(["system", "permissions", "datashield"], subject, type)).send() def add_perm(self, subject: str, type: str, permission: str): """ @@ -1204,13 +1115,7 @@ def get_perms(self, type: str) -> list: :param type: The subject type ('user' or 'group') """ request = self._make_request() - response = ( - request.get() - .resource( - self._make_get_ws(["system", "permissions", "administration"], type) - ) - .send() - ) + response = request.get().resource(self._make_get_ws(["system", "permissions", "administration"], type)).send() return response.from_json() def delete_perm(self, subject: str, type: str): @@ -1222,9 +1127,7 @@ def delete_perm(self, subject: str, type: str): """ 
request = self._make_request() request.delete().resource( - self._make_delete_ws( - ["system", "permissions", "administration"], subject, type - ) + self._make_delete_ws(["system", "permissions", "administration"], subject, type) ).send() def add_perm(self, subject: str, type: str, permission: str): diff --git a/obiba_opal/project.py b/obiba_opal/project.py index bf26058..cfe776d 100755 --- a/obiba_opal/project.py +++ b/obiba_opal/project.py @@ -35,9 +35,7 @@ def add_arguments(cls, parser): help="Project database name. If not provided only views can be added.", ) parser.add_argument("--title", "-t", required=False, help="Project title.") - parser.add_argument( - "--description", "-dc", required=False, help="Project description." - ) + parser.add_argument("--description", "-dc", required=False, help="Project description.") parser.add_argument( "--tags", "-tg", @@ -134,9 +132,7 @@ def get_project(self, name: str, fail_safe: bool = True) -> dict: if not name: raise ValueError("The project name is required.") request = self._make_request(fail_safe) - response = ( - request.get().resource(core.UriBuilder(["project", name]).build()).send() - ) + response = request.get().resource(core.UriBuilder(["project", name]).build()).send() return response.from_json() if response.code == 200 else None def delete_project(self, name: str): @@ -163,7 +159,8 @@ def add_project( Add a project. :param name: The project name - :param database: The project database name. If not provided only views can be added. See get_databases() for the list of databases available for storage. + :param database: The project database name. If not provided only views can be added. See + get_databases() for the list of databases available for storage. :param title: The project title :param description: The project description :param tags: The list of project tags @@ -189,9 +186,7 @@ def add_project( request = self._make_request() request.accept_json().content_type_json() - request.post().resource(core.UriBuilder(["projects"]).build()).content( - json.dumps(project) - ).send() + request.post().resource(core.UriBuilder(["projects"]).build()).content(json.dumps(project)).send() def get_databases(self, usage: str = "storage") -> list: """ @@ -201,13 +196,7 @@ def get_databases(self, usage: str = "storage") -> list: """ request = self._make_request() request.accept_json() - response = ( - request.get() - .resource( - core.UriBuilder(["system", "databases"]).query("usage", usage).build() - ) - .send() - ) + response = request.get().resource(core.UriBuilder(["system", "databases"]).query("usage", usage).build()).send() return response.from_json() def _make_request(self, fail_safe: bool = False): @@ -233,9 +222,7 @@ def add_arguments(self, parser): """ Add command specific options """ - parser.add_argument( - "--project", "-pr", required=True, help="Source project name" - ) + parser.add_argument("--project", "-pr", required=True, help="Source project name") parser.add_argument( "--archive", "-ar", @@ -288,7 +275,8 @@ def backup_project( :param project: The project name :param archive: The archive directory path in the Opal file system - :param views_as_tables: Treat views as tables, i.e. export data instead of keeping derivation scripts + :param views_as_tables: Treat views as tables, i.e. 
export data instead of + keeping derivation scripts :param force: Force overwriting an existing backup folder """ # Build and send request diff --git a/obiba_opal/security.py b/obiba_opal/security.py index 20eb9c8..59b073b 100644 --- a/obiba_opal/security.py +++ b/obiba_opal/security.py @@ -20,9 +20,7 @@ def do_command(cls, args): if args.verbose: request.verbose() - response = ( - request.get().resource("/system/crypto/encrypt/" + args.plain).send() - ) + response = request.get().resource("/system/crypto/encrypt/" + args.plain).send() print(response.content) finally: client.close() @@ -47,11 +45,7 @@ def do_command(cls, args): if args.verbose: request.verbose() - response = ( - request.get() - .resource("/system/crypto/decrypt/" + args.encrypted) - .send() - ) + response = request.get().resource("/system/crypto/decrypt/" + args.encrypted).send() print(response.content) finally: client.close() diff --git a/obiba_opal/sql.py b/obiba_opal/sql.py index d72a9f9..0ff386c 100644 --- a/obiba_opal/sql.py +++ b/obiba_opal/sql.py @@ -20,7 +20,9 @@ def add_arguments(cls, parser): "--project", "-pr", required=False, - help="Source project name, that will be used to resolve the table names in the FROM statement. If not provided, the fully qualified table names must be specified in the query (escaped by backquotes: `.
`).", + help="Source project name, that will be used to resolve the table names in the FROM statement. If not " + "provided, the fully qualified table names must be specified in the query (escaped by backquotes: " + "`.
`).", ) parser.add_argument("--query", "-q", required=True, help="SQL query") parser.add_argument( @@ -97,13 +99,15 @@ def add_arguments(cls, parser): "--project", "-pr", required=False, - help="Project name used as the SQL execution context to filter. If not specified, history from any context is returned. If '*' is specified, history of SQL execution without context is returned.", + help="Project name used as the SQL execution context to filter. If not specified, history from any context " + "is returned. If '*' is specified, history of SQL execution without context is returned.", ) parser.add_argument( "--offset", "-os", required=False, - help="Number of history items to skip. Default is 0 (note that the items are ordered by most recent first).", + help="Number of history items to skip. Default is 0 (note that the items are ordered by most recent " + "first).", ) parser.add_argument( "--limit", @@ -115,7 +119,8 @@ def add_arguments(cls, parser): "--subject", "-sb", required=False, - help="Filter by user name, only administrators can retrieve SQL history of other users. If '*' is specified, history of all users is retrieved. Default is the current user name.", + help="Filter by user name, only administrators can retrieve SQL history of other users. If '*' is " + "specified, history of all users is retrieved. Default is the current user name.", ) parser.add_argument( "--json", @@ -134,13 +139,19 @@ def do_command(cls, args): client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: if args.subject and args.subject != args.user: - builder = core.UriBuilder( - ["system", "subject-profile", args.subject, "sql-history"] - ) + builder = core.UriBuilder([ + "system", + "subject-profile", + args.subject, + "sql-history", + ]) else: - builder = core.UriBuilder( - ["system", "subject-profile", "_current", "sql-history"] - ) + builder = core.UriBuilder([ + "system", + "subject-profile", + "_current", + "sql-history", + ]) if args.project: builder.query("datasource", args.project) if args.offset: diff --git a/obiba_opal/subjects.py b/obiba_opal/subjects.py index 96da781..e72d1c0 100755 --- a/obiba_opal/subjects.py +++ b/obiba_opal/subjects.py @@ -25,7 +25,8 @@ def add_arguments(self, parser): "--upassword", "-upa", required=False, - help="User password of at least 8 characters, must contain at least one digit, one upper case alphabet, one lower case alphabet, one special character (which includes @#$%^&+=!) and no white space.", + help="User password of at least 8 characters, must contain at least one digit, one upper case alphabet, " + "one lower case alphabet, one special character (which includes @#$%^&+=!) and no white space.", ) parser.add_argument( "--ucertificate", @@ -40,9 +41,7 @@ def add_arguments(self, parser): required=False, help="Disable user account (if omitted the user is enabled by default).", ) - parser.add_argument( - "--groups", "-g", nargs="+", required=False, help="User groups" - ) + parser.add_argument("--groups", "-g", nargs="+", required=False, help="User groups") parser.add_argument( "--fetch", @@ -164,7 +163,8 @@ def update_user( Update a user. :param name: The user name - :param upassword: The user password of at least 8 characters, must contain at least one digit, one upper case alphabet, one lower case alphabet, one special character (which includes @#$%^&+=!) 
and no white space + :param upassword: The user password of at least 8 characters, must contain at least one digit, one upper case + alphabet, one lower case alphabet, one special character (which includes @#$%^&+=!) and no white space :param ucertificate: The user certificate file. :param groups: The list of groups :param disabled: Not enabled @@ -182,9 +182,7 @@ def update_user( if upassword: if userInfo["authenticationType"] == "CERTIFICATE": - raise ValueError( - f"{user['name']} requires a certificate (public key) file" - ) + raise ValueError(f"{user['name']} requires a certificate (public key) file") if len(upassword) < 8: raise ValueError("Password must contain at least 8 characters.") user["authenticationType"] = "PASSWORD" @@ -218,7 +216,8 @@ def add_user( Add a user. :param name: The user name - :param upassword: The user password of at least 8 characters, must contain at least one digit, one upper case alphabet, one lower case alphabet, one special character (which includes @#$%^&+=!) and no white space + :param upassword: The user password of at least 8 characters, must contain at least one digit, one upper case + alphabet, one lower case alphabet, one special character (which includes @#$%^&+=!) and no white space :param ucertificate: The user certificate file. :param groups: The list of groups :param disabled: Not enabled diff --git a/obiba_opal/system.py b/obiba_opal/system.py index fc64346..1b6cae7 100644 --- a/obiba_opal/system.py +++ b/obiba_opal/system.py @@ -26,9 +26,7 @@ def add_arguments(cls, parser): help="Pretty JSON formatting of the response", ) - parser.add_argument( - "--version", action="store_true", required=False, help="Opal version number" - ) + parser.add_argument("--version", action="store_true", required=False, help="Opal version number") parser.add_argument( "--env", action="store_true", @@ -107,9 +105,7 @@ def add_arguments(cls, parser): Add plugin command specific options """ - parser.add_argument( - "--list", "-ls", action="store_true", help="List the installed plugins." - ) + parser.add_argument("--list", "-ls", action="store_true", help="List the installed plugins.") parser.add_argument( "--updates", "-lu", @@ -135,19 +131,15 @@ def add_arguments(cls, parser): "--remove", "-rm", required=False, - help="Remove a plugin by providing its name. Requires system " - "restart to be effective.", + help="Remove a plugin by providing its name. Requires system restart to be effective.", ) parser.add_argument( "--reinstate", "-ri", required=False, - help="Reinstate a plugin that was previously removed by providing " - "its name.", - ) - parser.add_argument( - "--fetch", "-f", required=False, help="Get the named plugin description." 
+ help="Reinstate a plugin that was previously removed by providing its name.", ) + parser.add_argument("--fetch", "-f", required=False, help="Get the named plugin description.") parser.add_argument( "--configure", "-c", @@ -202,58 +194,35 @@ def do_command(cls, args): response = request.get().resource("/plugins/_available").send() elif args.install: if args.install.startswith("/"): - response = ( - request.post().resource("/plugins?file=" + args.install).send() - ) + response = request.post().resource("/plugins?file=" + args.install).send() else: nameVersion = args.install.split(":") if len(nameVersion) == 1: - response = ( - request.post() - .resource("/plugins?name=" + nameVersion[0]) - .send() - ) + response = request.post().resource("/plugins?name=" + nameVersion[0]).send() else: response = ( - request.post() - .resource( - "/plugins?name=" - + nameVersion[0] - + "&version=" - + nameVersion[1] - ) + request + .post() + .resource("/plugins?name=" + nameVersion[0] + "&version=" + nameVersion[1]) .send() ) elif args.fetch: response = request.get().resource("/plugin/" + args.fetch).send() elif args.configure: request.content_type_text_plain() - print( - "Enter plugin site properties (one property per line, " - "Ctrl-D to end input):" - ) + print("Enter plugin site properties (one property per line, Ctrl-D to end input):") request.content(sys.stdin.read()) - response = ( - request.put().resource("/plugin/" + args.configure + "/cfg").send() - ) + response = request.put().resource("/plugin/" + args.configure + "/cfg").send() elif args.remove: response = request.delete().resource("/plugin/" + args.remove).send() elif args.reinstate: response = request.put().resource("/plugin/" + args.reinstate).send() elif args.status: - response = ( - request.get().resource("/plugin/" + args.status + "/service").send() - ) + response = request.get().resource("/plugin/" + args.status + "/service").send() elif args.start: - response = ( - request.put().resource("/plugin/" + args.start + "/service").send() - ) + response = request.put().resource("/plugin/" + args.start + "/service").send() elif args.stop: - response = ( - request.delete() - .resource("/plugin/" + args.stop + "/service") - .send() - ) + response = request.delete().resource("/plugin/" + args.stop + "/service").send() # format response res = response.content.decode("utf-8") @@ -287,18 +256,14 @@ def __make_request(self): def download(self, name: str): resource = self.OpalTaxonomyResource(name) - request = ( - self.__make_request() - .get() - .resource(resource.get_download_ws()) - .accept("text/plain") - ) + request = self.__make_request().get().resource(resource.get_download_ws()).accept("text/plain") return request.send() def importFile(self, file: str, override: bool = False): uri = ( - core.UriBuilder(["system", "conf", "taxonomies", "import", "_file"]) + core + .UriBuilder(["system", "conf", "taxonomies", "import", "_file"]) .query("file", file) .query("override", str(override).lower()) .build() @@ -306,12 +271,7 @@ def importFile(self, file: str, override: bool = False): return self.__make_request().post().resource(uri).send() def delete(self, name: str): - return ( - self.__make_request() - .resource(self.OpalTaxonomyResource(name).get_ws()) - .delete() - .send() - ) + return self.__make_request().resource(self.OpalTaxonomyResource(name).get_ws()).delete().send() def confirmAndDelete(self, name: str, rejectHandler): confirmed = input(f"Delete the taxonomy {name}? 
[y/N]: ") @@ -321,12 +281,7 @@ def confirmAndDelete(self, name: str, rejectHandler): return rejectHandler() def summaries(self): - return ( - self.__make_request() - .get() - .resource("/system/conf/taxonomies/summaries") - .send() - ) + return self.__make_request().get().resource("/system/conf/taxonomies/summaries").send() @classmethod def add_arguments(cls, parser): @@ -345,12 +300,8 @@ def add_arguments(cls, parser): required=False, help="Import a taxonomy from the provided Opal file path (YAML format).", ) - parser.add_argument( - "--delete", "-dt", required=False, help="Delete a taxonomy by name." - ) - parser.add_argument( - "--force", "-f", action="store_true", help="Skip confirmation." - ) + parser.add_argument("--delete", "-dt", required=False, help="Delete a taxonomy by name.") + parser.add_argument("--force", "-f", action="store_true", help="Skip confirmation.") parser.add_argument( "--json", "-j", @@ -388,12 +339,7 @@ def rejectHandler(): response = service.summaries() # format response - if ( - args.json - and not args.download - and not args.delete - and not args.import_file - ): + if args.json and not args.download and not args.delete and not args.import_file: print(response.pretty_json()) else: # output to stdout as string @@ -444,21 +390,15 @@ def add_arguments(cls, parser): action="store_true", help="Show JSON representation of the task", ) - parser.add_argument( - "--status", "-st", action="store_true", help="Get the status of the task" - ) + parser.add_argument("--status", "-st", action="store_true", help="Get the status of the task") parser.add_argument( "--wait", "-w", action="store_true", help="Wait for the task to complete (successfully or not)", ) - parser.add_argument( - "--cancel", "-c", action="store_true", help="Cancel the task" - ) - parser.add_argument( - "--delete", "-d", action="store_true", help="Delete the task" - ) + parser.add_argument("--cancel", "-c", action="store_true", help="Cancel the task") + parser.add_argument("--delete", "-d", action="store_true", help="Delete the task") parser.add_argument( "--json", "-j", @@ -482,9 +422,7 @@ def do_command(cls, args): id = str(json.loads(id)["id"]) args.id = id - if args.show or not ( - args.show or args.wait or args.status or args.cancel or args.delete - ): + if args.show or not (args.show or args.wait or args.status or args.cancel or args.delete): res = service.get_task(args.id) core.Formatter.print_json(res, args.json) if args.wait: @@ -520,12 +458,7 @@ def wait_task(self, id: str | int): if "progress" in task: progress = task["progress"] if "message" in progress: - sys.stdout.write( - "\r\033[K" - + str(progress["percent"]) - + "% " - + progress["message"] - ) + sys.stdout.write("\r\033[K" + str(progress["percent"]) + "% " + progress["message"]) else: sys.stdout.write("\r\033[K" + str(progress["percent"]) + "%") else: @@ -603,8 +536,7 @@ def add_arguments(cls, parser): """ parser.add_argument( "ws", - help="Web service path, for instance: /datasource/xxx/table/yyy/" - "variable/vvv", + help="Web service path, for instance: /datasource/xxx/table/yyy/variable/vvv", ) parser.add_argument( "--method", @@ -628,8 +560,7 @@ def add_arguments(cls, parser): "--headers", "-hs", required=False, - help='Custom headers in the form of: { "Key2": "Value2", ' - '"Key2": "Value2" }', + help='Custom headers in the form of: { "Key2": "Value2", "Key2": "Value2" }', ) parser.add_argument( "--json", diff --git a/obiba_opal/table.py b/obiba_opal/table.py index 8f5db75..b182cc4 100644 --- a/obiba_opal/table.py +++ 
b/obiba_opal/table.py @@ -23,9 +23,7 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument( - "--project", "-pr", required=True, help="Source project name" - ) + parser.add_argument("--project", "-pr", required=True, help="Source project name") parser.add_argument( "--tables", "-t", @@ -33,9 +31,7 @@ def add_arguments(cls, parser): required=False, help="List of table names to be copied (default is all)", ) - parser.add_argument( - "--destination", "-d", required=True, help="Destination project name" - ) + parser.add_argument("--destination", "-d", required=True, help="Destination project name") parser.add_argument( "--name", "-na", @@ -43,12 +39,8 @@ def add_arguments(cls, parser): help="New table name (required if source and destination are the " "same, ignored if more than one table is to be copied)", ) - parser.add_argument( - "--incremental", "-i", action="store_true", help="Incremental copy" - ) - parser.add_argument( - "--nulls", "-nu", action="store_true", help="Copy the null values" - ) + parser.add_argument("--incremental", "-i", action="store_true", help="Incremental copy") + parser.add_argument("--nulls", "-nu", action="store_true", help="Copy the null values") parser.add_argument( "--json", "-j", @@ -118,7 +110,8 @@ def _retrieve_datasource_tables(self, project: str) -> list: if self.verbose: request.verbose() response = ( - request.fail_on_error() + request + .fail_on_error() .get() .resource(core.UriBuilder(["datasource", project, "tables"]).build()) .send() @@ -168,9 +161,7 @@ def do_command(cls, args): # Build and send requests client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - DictionaryService(client, args.verbose).delete_tables( - args.project, args.tables - ) + DictionaryService(client, args.verbose).delete_tables(args.project, args.tables) finally: client.close() @@ -190,9 +181,7 @@ def add_arguments(cls, parser): """ Add command specific options """ - parser.add_argument( - "--project", "-pr", required=True, help="Source project name" - ) + parser.add_argument("--project", "-pr", required=True, help="Source project name") parser.add_argument( "--views", "-vw", @@ -223,9 +212,7 @@ def do_command(cls, args): # Build and send request client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - BackupViewService(client, args.verbose).backup_views( - args.project, args.views, args.output, args.force - ) + BackupViewService(client, args.verbose).backup_views(args.project, args.views, args.output, args.force) finally: client.close() @@ -239,11 +226,7 @@ def backup_view(self, project: str, view, outdir, force: bool): request.fail_on_error() if self.verbose: request.verbose() - response = ( - request.get() - .resource(core.UriBuilder(["datasource", project, "view", view]).build()) - .send() - ) + response = request.get().resource(core.UriBuilder(["datasource", project, "view", view]).build()).send() dowrite = True if os.path.exists(outpath) and not force: @@ -302,12 +285,7 @@ def _retrieve_datasource_views(self, project: str) -> list: request.fail_on_error() if self.verbose: request.verbose() - response = ( - request.get() - .resource(core.UriBuilder(["datasource", project, "tables"]).build()) - .send() - .from_json() - ) + response = request.get().resource(core.UriBuilder(["datasource", project, "tables"]).build()).send().from_json() views = [] for table in response: @@ -331,9 +309,7 @@ def add_arguments(cls, parser): """ Add data command specific options """ - parser.add_argument( - 
"--project", "-pr", required=True, help="Destination project name" - ) + parser.add_argument("--project", "-pr", required=True, help="Destination project name") parser.add_argument( "--views", "-vw", @@ -346,8 +322,7 @@ def add_arguments(cls, parser): "--input", "-in", required=False, - help="Input directory name or input zip file containing JSON views " - "(default is current directory)", + help="Input directory name or input zip file containing JSON views (default is current directory)", ) parser.add_argument( "--force", @@ -368,9 +343,7 @@ def do_command(cls, args): service = RestoreViewService(client, args.verbose) service.restore_views(args.project, args.views, args.input, args.force) - def restore_views( - self, project: str, views: list, input: str = None, force: bool = False - ): + def restore_views(self, project: str, views: list, input: str = None, force: bool = False): obsviews = self._retrieve_datasource_views(project) # list input directory content @@ -383,8 +356,7 @@ def restore_views( for viewfile in [ filename for filename in inzip.namelist() - if filename.endswith(".json") - and (not views or filename[:-5] in views) + if filename.endswith(".json") and (not views or filename[:-5] in views) ]: self._restore_zipped_view(project, obsviews, viewfile, inzip, force) else: @@ -393,12 +365,7 @@ def restore_views( def _retrieve_datasource_views(self, project: str): request = self._make_request() - response = ( - request.get() - .resource(core.UriBuilder(["datasource", project, "tables"]).build()) - .send() - .from_json() - ) + response = request.get().resource(core.UriBuilder(["datasource", project, "tables"]).build()).send().from_json() views = [] for table in response: @@ -407,9 +374,7 @@ def _retrieve_datasource_views(self, project: str): return views - def _restore_view( - self, project: str, obsviews: list, infile: str, force: bool = False - ): + def _restore_view(self, project: str, obsviews: list, infile: str, force: bool = False): view = os.path.basename(infile[:-5]) # supposed to be a .json file path dowrite = True @@ -429,15 +394,11 @@ def _restore_view( if view in obsviews: request.put().resource( - core.UriBuilder(["datasource", project, "view", view]) - .query("comment", "restore-view") - .build() + core.UriBuilder(["datasource", project, "view", view]).query("comment", "restore-view").build() ).send() else: request.post().resource( - core.UriBuilder(["datasource", project, "views"]) - .query("comment", "restore-view") - .build() + core.UriBuilder(["datasource", project, "views"]).query("comment", "restore-view").build() ).send() def _restore_zipped_view( @@ -466,15 +427,11 @@ def _restore_zipped_view( if view in obsviews: request.put().resource( - core.UriBuilder(["datasource", project, "view", view]) - .query("comment", "restore-view") - .build() + core.UriBuilder(["datasource", project, "view", view]).query("comment", "restore-view").build() ).send() else: request.post().resource( - core.UriBuilder(["datasource", project, "views"]) - .query("comment", "restore-view") - .build() + core.UriBuilder(["datasource", project, "views"]).query("comment", "restore-view").build() ).send() def _list_json_files(self, dirref: str, basenames): diff --git a/tests/test_core.py b/tests/test_core.py index 15bc897..0a59535 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -13,20 +13,17 @@ def setup_class(cls): cls.SSL_KEY = "./resources/certificates/privatekey.pem" def test_sendRestBadServer(self): - # FIXME for some reason, the cookie_file is not removed (despite the 
os.remove() is called and os.path.exists() says it was removed) + # FIXME for some reason, the cookie_file is not removed (despite the os.remove() + # is called and os.path.exists() says it was removed) try: # this one will make a request to check if an OTP is needed - OpalClient.buildWithAuthentication( - server="http://deadbeef:8080", user=TEST_USER, password=TEST_PASSWORD - ) + OpalClient.buildWithAuthentication(server="http://deadbeef:8080", user=TEST_USER, password=TEST_PASSWORD) assert False except Exception: assert True def test_sendRestBadCredentials(self): - client = OpalClient.buildWithAuthentication( - server=TEST_SERVER, user="admin", password=TEST_PASSWORD - ) + client = OpalClient.buildWithAuthentication(server=TEST_SERVER, user="admin", password=TEST_PASSWORD) try: self.assertRaises(Exception, self.__sendSimpleRequest, client.new_request()) @@ -36,9 +33,7 @@ def test_sendRestBadCredentials(self): def test_sendRest(self): client = None try: - client = OpalClient.buildWithAuthentication( - server=TEST_SERVER, user=TEST_USER, password=TEST_PASSWORD - ) + client = OpalClient.buildWithAuthentication(server=TEST_SERVER, user=TEST_USER, password=TEST_PASSWORD) self.__sendSimpleRequest(client.new_request()) except Exception as e: self.fail(e) diff --git a/tests/test_project.py b/tests/test_project.py index 454dfb6..48725b8 100644 --- a/tests/test_project.py +++ b/tests/test_project.py @@ -38,9 +38,7 @@ def test_add_delete_project(self): def test_backup_command(self): client = self.client - res = BackupProjectCommand(client).backup_project( - "CNSIM", "/tmp/test", force=True - ) + res = BackupProjectCommand(client).backup_project("CNSIM", "/tmp/test", force=True) assert res["command"] == "backup" assert res["name"] == "backup" assert res["project"] == "CNSIM" diff --git a/tests/test_taxonomy.py b/tests/test_taxonomy.py index 80bedb2..8dd388d 100644 --- a/tests/test_taxonomy.py +++ b/tests/test_taxonomy.py @@ -29,9 +29,7 @@ def test_1_importFile(self): # Read and modify the taxonomy file to use randomized name with open("./tests/resources/OBiBa_taxonomyTest.yml") as f: content = f.read() - content = content.replace( - '"OBiBa_taxonomyTest"', f'"{self.TEST_TAXONOMY_NAME}"' - ) + content = content.replace('"OBiBa_taxonomyTest"', f'"{self.TEST_TAXONOMY_NAME}"') with open(self.LOCAL_TAXONOMY_FILE, "w") as f: f.write(content) try: diff --git a/tests/utils.py b/tests/utils.py index aa7cdd1..de04b1a 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -7,6 +7,4 @@ def make_client(): - return OpalClient.buildWithAuthentication( - server=TEST_SERVER, user=TEST_USER, password=TEST_PASSWORD - ) + return OpalClient.buildWithAuthentication(server=TEST_SERVER, user=TEST_USER, password=TEST_PASSWORD) From 3bcf7f3a7f77d8fdf2829fdb7b842ec35651cffb Mon Sep 17 00:00:00 2001 From: ymarcon Date: Mon, 9 Feb 2026 18:23:31 +0100 Subject: [PATCH 6/9] Fixed remaining lint errors --- obiba_opal/console.py | 10 +++++++--- obiba_opal/dictionary.py | 21 ++++++++++----------- obiba_opal/exports.py | 14 +++----------- obiba_opal/imports.py | 8 -------- obiba_opal/perm.py | 15 ++++++++------- obiba_opal/subjects.py | 17 +++-------------- obiba_opal/system.py | 2 +- obiba_opal/table.py | 7 +++---- tests/test_core.py | 12 +++++++----- tests/test_file.py | 30 +++++++++++++++++++----------- tests/test_subjects.py | 2 +- tests/test_taxonomy.py | 28 +++++++++++++++++++--------- 12 files changed, 81 insertions(+), 85 deletions(-) diff --git a/obiba_opal/console.py b/obiba_opal/console.py index d2cf351..259b893 100755 --- 
a/obiba_opal/console.py +++ b/obiba_opal/console.py @@ -553,9 +553,13 @@ def run(): if hasattr(args, "func"): try: # Prompt for a missing password only when user/password is required - if not (args.ssl_cert or args.ssl_key) and not args.token: - if not args.password or len(args.password) == 0: - args.password = prompt_password() + if ( + not (args.ssl_cert or args.ssl_key) + and not args.token + and (not args.password or len(args.password) == 0) + and args.user + ): + args.password = prompt_password() args.func(args) except HTTPError as e: Formatter.print_json(e.error, args.json if hasattr(args, "json") else False) diff --git a/obiba_opal/dictionary.py b/obiba_opal/dictionary.py index 98f46a0..b60dca5 100755 --- a/obiba_opal/dictionary.py +++ b/obiba_opal/dictionary.py @@ -361,17 +361,16 @@ def _handle_variable( if locale else "namespace" in attribute and "locale" not in attribute ) - if do_search: - if not taxonomies or attribute["namespace"] in taxonomies: - row = [ - datasource, - table, - variableObject["name"], - attribute["namespace"], - attribute["name"], - attribute["value"], - ] - writer.writerow(row) + if do_search and (not taxonomies or attribute["namespace"] in taxonomies): + row = [ + datasource, + table, + variableObject["name"], + attribute["namespace"], + attribute["name"], + attribute["value"], + ] + writer.writerow(row) class ImportAnnotationsService: diff --git a/obiba_opal/exports.py b/obiba_opal/exports.py index ce6a609..5a2f302 100644 --- a/obiba_opal/exports.py +++ b/obiba_opal/exports.py @@ -371,11 +371,7 @@ def export_data( multilines=multilines, verbose=self.verbose, ) - response = None - if output.endswith(".sas7bdat"): - response = exporter.submit("RSAS") - else: - response = exporter.submit("RXPT") + response = exporter.submit("RSAS") if output.endswith(".sas7bdat") else exporter.submit("RXPT") return response.from_json() @@ -475,11 +471,7 @@ def export_data( multilines=multilines, verbose=self.verbose, ) - response = None - if output.endswith(".sav"): - response = exporter.submit("RSPSS") - else: - response = exporter.submit("RZSPSS") + response = exporter.submit("RSPSS") if output.endswith(".sav") else exporter.submit("RZSPSS") return response.from_json() @@ -772,7 +764,7 @@ def do_command(cls, args): # Build and send requests client = core.OpalClient.build(core.OpalClient.LoginInfo.parse(args)) try: - res = ExportVCFCommand(client, args.verbose).export_data( + ExportVCFCommand(client, args.verbose).export_data( args.project, args.vcf, args.destination, diff --git a/obiba_opal/imports.py b/obiba_opal/imports.py index 71ca0f0..e789db7 100644 --- a/obiba_opal/imports.py +++ b/obiba_opal/imports.py @@ -1406,10 +1406,6 @@ class ImportIDService: Import identifiers in the identifiers database. """ - def __init__(self, client: core.OpalClient, verbose: bool = False): - self.client = client - self.verbose = verbose - def __init__(self, client: core.OpalClient, verbose: bool = False): self.client = client self.verbose = verbose @@ -1459,10 +1455,6 @@ class ImportIDMapService: Import identifiers mapping into the identifiers database. 
""" - def __init__(self, client: core.OpalClient, verbose: bool = False): - self.client = client - self.verbose = verbose - def __init__(self, client: core.OpalClient, verbose: bool = False): self.client = client self.verbose = verbose diff --git a/obiba_opal/perm.py b/obiba_opal/perm.py index 397b54b..ba4ba47 100644 --- a/obiba_opal/perm.py +++ b/obiba_opal/perm.py @@ -2,19 +2,21 @@ Opal permissions """ +from dataclasses import dataclass + import obiba_opal.core as core +@dataclass class PermService: """ Base class for permissions management. """ - SUBJECT_TYPES = ("USER", "GROUP") + client: core.OpalClient + verbose: bool = False - def __init__(self, client: core.OpalClient, verbose: bool = False): - self.client = client - self.verbose = verbose + SUBJECT_TYPES = ("USER", "GROUP") @classmethod def _add_permission_arguments(self, parser, permissions: list): @@ -78,9 +80,8 @@ def _validate_args(self, args, permissions): if self._map_permission(args.permission, permissions) is None: raise ValueError(f"Valid permissions are: {', '.join(list(permissions.keys()))}") - if args.delete: - if not args.subject: - raise ValueError("The subject name is required") + if args.delete and not args.subject: + raise ValueError("The subject name is required") if not args.type or args.type.upper() not in self.SUBJECT_TYPES: raise ValueError(f"Valid subject types are: {', '.join(self.SUBJECT_TYPES).lower()}") diff --git a/obiba_opal/subjects.py b/obiba_opal/subjects.py index e72d1c0..3114138 100755 --- a/obiba_opal/subjects.py +++ b/obiba_opal/subjects.py @@ -100,11 +100,7 @@ def do_command(self, args): elif args.delete: service.delete_user(args.name) else: - res = None - if args.name: - res = service.get_user(args.name, False) - else: - res = service.get_users() + res = service.get_user(args.name, False) if args.name else service.get_users() core.Formatter.print_json(res, args.json) finally: client.close() @@ -256,10 +252,7 @@ def _make_ws(self, name: str = None): """ Build the web service resource path """ - if not name: - ws = "/system/subject-credentials" - else: - ws = f"/system/subject-credential/{name}" + ws = "/system/subject-credentials" if not name else f"/system/subject-credential/{name}" return ws @@ -311,11 +304,7 @@ def do_command(self, args): if args.delete: service.delete_group(args.name) else: - res = None - if args.name: - res = service.get_group(args.name) - else: - res = service.get_groups() + res = service.get_group(args.name) if args.name else service.get_groups() core.Formatter.print_json(res, args.json) finally: client.close() diff --git a/obiba_opal/system.py b/obiba_opal/system.py index 1b6cae7..3c93a72 100644 --- a/obiba_opal/system.py +++ b/obiba_opal/system.py @@ -324,7 +324,7 @@ def do_command(cls, args): elif args.import_file: response = service.importFile(args.import_file) elif args.delete: - taxo = cls.OpalTaxonomyResource(args.delete) + cls.OpalTaxonomyResource(args.delete) # confirm if args.force: response = service.delete(args.delete) diff --git a/obiba_opal/table.py b/obiba_opal/table.py index b182cc4..3472953 100644 --- a/obiba_opal/table.py +++ b/obiba_opal/table.py @@ -436,11 +436,10 @@ def _restore_zipped_view( def _list_json_files(self, dirref: str, basenames): matches = [] - for root, dirnames, filenames in os.walk(dirref): + for root, _, filenames in os.walk(dirref): for filename in filenames: - if filename.endswith(".json"): - if not basenames or filename[:-5] in basenames: - matches.append(os.path.join(root, filename)) + if filename.endswith(".json") and (not 
basenames or filename[:-5] in basenames): + matches.append(os.path.join(root, filename)) return matches def _make_request(self, fail_safe: bool = False): diff --git a/tests/test_core.py b/tests/test_core.py index 0a59535..a9dbc43 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -1,7 +1,9 @@ from argparse import Namespace import unittest from obiba_opal import OpalClient +from obiba_opal.core import HTTPError from os.path import exists +from requests.exceptions import RequestException from tests.utils import TEST_SERVER, TEST_USER, TEST_PASSWORD @@ -18,15 +20,15 @@ def test_sendRestBadServer(self): try: # this one will make a request to check if an OTP is needed OpalClient.buildWithAuthentication(server="http://deadbeef:8080", user=TEST_USER, password=TEST_PASSWORD) - assert False - except Exception: + raise AssertionError("Expected an exception when connecting to a non existing server") from None + except RequestException: assert True def test_sendRestBadCredentials(self): client = OpalClient.buildWithAuthentication(server=TEST_SERVER, user="admin", password=TEST_PASSWORD) try: - self.assertRaises(Exception, self.__sendSimpleRequest, client.new_request()) + self.assertRaises(HTTPError, self.__sendSimpleRequest, client.new_request()) finally: client.close() @@ -86,11 +88,11 @@ def test_validSslLoginInfo(self): def test_invalidServerInfo(self): args = Namespace(opl=TEST_SERVER, user=TEST_USER, password=TEST_PASSWORD) - self.assertRaises(Exception, OpalClient.LoginInfo.parse, args) + self.assertRaises(ValueError, OpalClient.LoginInfo.parse, args) def test_invalidLoginInfo(self): args = Namespace(opal=TEST_SERVER, usr="administrator", password=TEST_PASSWORD) - self.assertRaises(Exception, OpalClient.LoginInfo.parse, args) + self.assertRaises(ValueError, OpalClient.LoginInfo.parse, args) def __sendSimpleRequest(self, request): request.fail_on_error() diff --git a/tests/test_file.py b/tests/test_file.py index 777d939..1edc1b4 100644 --- a/tests/test_file.py +++ b/tests/test_file.py @@ -31,12 +31,14 @@ def test_1_fileUpload(self): if response["name"] == self.TEST_FILENAME: assert True else: - assert False + raise AssertionError( + "Failed to upload file, check if the file exists and if the name is correct." + ) from None finally: if os.path.exists(self.LOCAL_UPLOAD_FILE): os.remove(self.LOCAL_UPLOAD_FILE) - except Exception: - assert False + except Exception as e: + raise AssertionError("Failed to upload file, check if the file exists and if the name is correct.") from e def test_2_fileDownload(self): try: @@ -47,9 +49,11 @@ def test_2_fileDownload(self): os.remove(self.TEST_FILE) assert True else: - assert False - except Exception: - assert False + raise AssertionError( + "Failed to download file, check if the file exists and if the name is correct." + ) from None + except Exception as e: + raise AssertionError("Failed to download file, check if the file exists and if the name is correct.") from e def test_3_fileDownloadWithPassword(self): try: @@ -61,9 +65,13 @@ def test_3_fileDownloadWithPassword(self): os.remove(self.TEST_ZIPPED_FILE) assert True else: - assert False - except Exception: - assert False + raise AssertionError( + "Failed to download file with password, check if the file exists and if the name is correct." + ) from None + except Exception as e: + raise AssertionError( + "Failed to download file with password, check if the file exists and if the name is correct." 
+ ) from e def test_4_deleteUpload(self): try: @@ -71,5 +79,5 @@ def test_4_deleteUpload(self): self.service.file_info(self.TEST_FILE) except HTTPError as e: assert e.code == 404 - except Exception: - assert False + except Exception as e: + raise AssertionError("Failed to delete file, check if the file exists and if the name is correct.") from e diff --git a/tests/test_subjects.py b/tests/test_subjects.py index 6c199a0..d641d50 100644 --- a/tests/test_subjects.py +++ b/tests/test_subjects.py @@ -23,7 +23,7 @@ def test_user_group(self): service.add_user(name, upassword, groups=[grp], disabled=True) user = service.get_user(name) assert user["name"] == name - assert user["enabled"] == False + assert not user["enabled"] assert len(user["groups"]) == 1 assert user["groups"][0] == grp assert user["authenticationType"] == "PASSWORD" diff --git a/tests/test_taxonomy.py b/tests/test_taxonomy.py index 8dd388d..0d2838a 100644 --- a/tests/test_taxonomy.py +++ b/tests/test_taxonomy.py @@ -40,20 +40,26 @@ def test_1_importFile(self): fileService.delete_file(self.TEST_TAXONOMY_FILE) assert response.code == 201 else: - assert False + raise AssertionError( + "Failed to import taxonomy, check if the file exists and if the name is correct." + ) from None finally: if os.path.exists(self.LOCAL_TAXONOMY_FILE): os.remove(self.LOCAL_TAXONOMY_FILE) - except Exception: - assert False + except Exception as e: + raise AssertionError( + "Failed to import taxonomy, check if the file exists and if the name is correct." + ) from e def test_2_downloadTaxonomy(self): try: response = self.service.download(self.TEST_TAXONOMY_NAME) assert response.code == 200 and self.TEST_TAXONOMY_NAME in str(response) - except Exception: - assert False + except Exception as e: + raise AssertionError( + "Failed to download taxonomy, check if the name is correct and if the taxonomy was properly imported." + ) from e def test_3_taxonomiesSummary(self): try: @@ -71,8 +77,10 @@ def test_3_taxonomiesSummary(self): ) > 0 ) - except Exception: - assert False + except Exception as e: + raise AssertionError( + "Failed to get taxonomies summaries, check if the taxonomy was properly imported." + ) from e def test_4_deleteTaxonomy(self): try: @@ -82,5 +90,7 @@ def test_4_deleteTaxonomy(self): response = self.service.delete(name) assert response.code == 200 - except Exception: - assert False + except Exception as e: + raise AssertionError( + "Failed to delete taxonomy, check if it was already deleted or if the name is correct." + ) from e \ No newline at end of file From f6cbf32bd50b262a9b0775d3f827ed0e35f578a0 Mon Sep 17 00:00:00 2001 From: ymarcon Date: Tue, 10 Feb 2026 08:16:58 +0100 Subject: [PATCH 7/9] fix: file download the pythonic way --- obiba_opal/file.py | 11 +++++------ tests/test_file.py | 13 +++++++++---- tests/test_taxonomy.py | 2 +- tests/utils.py | 1 + 4 files changed, 16 insertions(+), 11 deletions(-) diff --git a/obiba_opal/file.py b/obiba_opal/file.py index 807cc87..4077b51 100755 --- a/obiba_opal/file.py +++ b/obiba_opal/file.py @@ -82,14 +82,14 @@ def do_command(self, args): finally: client.close() - def download_file(self, path: str, fd, download_password: str = None): + def download_file(self, path: str, outfile: os.PathLike, download_password: str = None): """ Download a file. 
:param path: The file path in Opal - :param fd: The destination file descriptor (see os.fdopen()) + :param outfile: The destination file object opened in 'wb' mode :param download_password: The password to use to encrypt the - downloaded zip archive + downloaded zip archive """ request = self.client.new_request() request.fail_on_error() @@ -99,9 +99,8 @@ def download_file(self, path: str, fd, download_password: str = None): file = FileService.OpalFile(path) - fp = os.fdopen(fd, "wb") - request.get().resource(file.get_ws()).accept("*/*").header("X-File-Key", download_password).send(fp) - fp.flush() + request.get().resource(file.get_ws()).accept("*/*").header("X-File-Key", download_password).send(outfile) + outfile.flush() def upload_file(self, upload: str, path: str): """ diff --git a/tests/test_file.py b/tests/test_file.py index 1edc1b4..5e25137 100644 --- a/tests/test_file.py +++ b/tests/test_file.py @@ -1,3 +1,4 @@ +from turtle import fd import unittest from tests.utils import make_client from obiba_opal.file import FileService @@ -24,6 +25,7 @@ def setup_class(cls): def test_1_fileUpload(self): try: + print(f"Uploading file to {self.TEST_FILE}...") shutil.copyfile("./tests/resources/data.csv", self.LOCAL_UPLOAD_FILE) try: self.service.upload_file(self.LOCAL_UPLOAD_FILE, "/tmp") @@ -42,9 +44,10 @@ def test_1_fileUpload(self): def test_2_fileDownload(self): try: + print(f"Downloading file to {self.TEST_FILE}...") + # New: pythonic way with open(self.TEST_FILE, "wb") as outfile: - fd = outfile.fileno() - self.service.download_file(self.TEST_FILE, fd) + self.service.download_file(self.TEST_FILE, outfile) if os.path.exists(self.TEST_FILE): os.remove(self.TEST_FILE) assert True @@ -57,9 +60,10 @@ def test_2_fileDownload(self): def test_3_fileDownloadWithPassword(self): try: + print(f"Downloading file with password to {self.TEST_ZIPPED_FILE}...") + # New: pythonic way with open(self.TEST_ZIPPED_FILE, "wb") as outfile: - fd = outfile.fileno() - self.service.download_file(self.TEST_FILE, fd, "12345678") + self.service.download_file(self.TEST_FILE, outfile, "12345678") stat = os.stat(self.TEST_ZIPPED_FILE) if stat.st_size > 0: os.remove(self.TEST_ZIPPED_FILE) @@ -75,6 +79,7 @@ def test_3_fileDownloadWithPassword(self): def test_4_deleteUpload(self): try: + print(f"Deleting file {self.TEST_FILE}...") self.service.delete_file(self.TEST_FILE) self.service.file_info(self.TEST_FILE) except HTTPError as e: diff --git a/tests/test_taxonomy.py b/tests/test_taxonomy.py index 0d2838a..5113d9f 100644 --- a/tests/test_taxonomy.py +++ b/tests/test_taxonomy.py @@ -93,4 +93,4 @@ def test_4_deleteTaxonomy(self): except Exception as e: raise AssertionError( "Failed to delete taxonomy, check if it was already deleted or if the name is correct." 
- ) from e \ No newline at end of file + ) from e diff --git a/tests/utils.py b/tests/utils.py index de04b1a..75c6868 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -7,4 +7,5 @@ def make_client(): + print(f"Creating OpalClient for server {TEST_SERVER} with user {TEST_USER}...") return OpalClient.buildWithAuthentication(server=TEST_SERVER, user=TEST_USER, password=TEST_PASSWORD) From 79cc1fb3526a7a0f988dc6660014611a0de336c5 Mon Sep 17 00:00:00 2001 From: ymarcon Date: Tue, 10 Feb 2026 08:20:49 +0100 Subject: [PATCH 8/9] chore: code cleaning --- tests/test_file.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_file.py b/tests/test_file.py index 5e25137..8e7746d 100644 --- a/tests/test_file.py +++ b/tests/test_file.py @@ -1,4 +1,3 @@ -from turtle import fd import unittest from tests.utils import make_client from obiba_opal.file import FileService From 53ea1b3705d54a4328fe4cbcc22d52353aac6b5e Mon Sep 17 00:00:00 2001 From: ymarcon Date: Tue, 10 Feb 2026 08:45:44 +0100 Subject: [PATCH 9/9] chore: code cleanup --- Makefile | 2 ++ obiba_opal/analysis.py | 4 ++-- obiba_opal/core.py | 1 + obiba_opal/data.py | 2 +- obiba_opal/file.py | 9 +++++---- obiba_opal/system.py | 2 +- tests/test_file.py | 8 ++++---- tests/utils.py | 2 +- 8 files changed, 17 insertions(+), 13 deletions(-) diff --git a/Makefile b/Makefile index b34fcba..452161e 100644 --- a/Makefile +++ b/Makefile @@ -13,6 +13,8 @@ fix: format: uv run ruff format . +check: format fix + build: uv build diff --git a/obiba_opal/analysis.py b/obiba_opal/analysis.py index d028c9d..e740728 100644 --- a/obiba_opal/analysis.py +++ b/obiba_opal/analysis.py @@ -173,7 +173,7 @@ def export_table_analyses(self, project: str, table: str, fd, all_results: bool request = self.client.new_request() request.fail_on_error().accept("application/zip") fp = os.fdopen(fd, "wb") - request.get().resource(self._make_ws(project, table, all_results=all_results)).send() + request.get().resource(self._make_ws(project, table, all_results=all_results)).send(fp) fp.flush() def export_table_analysis(self, project: str, table: str, analysis_id: str, fd, all_results: bool = True): @@ -188,7 +188,7 @@ def export_table_analysis(self, project: str, table: str, analysis_id: str, fd, request = self.client.new_request() request.fail_on_error().accept("application/zip") fp = os.fdopen(fd, "wb") - request.get().resource(self._make_ws(project, table, analysis_id, all_results)).send() + request.get().resource(self._make_ws(project, table, analysis_id, all_results)).send(fp) fp.flush() def _make_ws( diff --git a/obiba_opal/core.py b/obiba_opal/core.py index e302cac..66726ee 100755 --- a/obiba_opal/core.py +++ b/obiba_opal/core.py @@ -201,6 +201,7 @@ def close(self): self.new_request().resource("/auth/session/_current").delete().send() self.session.close() except Exception: + # silently fail as the session might be already closed or the server not reachable pass self.id = None diff --git a/obiba_opal/data.py b/obiba_opal/data.py index 67f7d4c..28603e2 100755 --- a/obiba_opal/data.py +++ b/obiba_opal/data.py @@ -125,7 +125,7 @@ def _get_data(self, name: str, id: str = None, pos: str = None, fd=None) -> any: request.fail_on_error().get().resource(ws) if raw: fp = os.fdopen(fd, "wb") - response = request.accept("*/*").send(fp) + request.accept("*/*").send(fp) fp.flush() return None else: diff --git a/obiba_opal/file.py b/obiba_opal/file.py index 4077b51..15c708e 100755 --- a/obiba_opal/file.py +++ b/obiba_opal/file.py @@ -82,12 +82,12 @@ def do_command(self, args): finally: 
client.close() - def download_file(self, path: str, outfile: os.PathLike, download_password: str = None): + def download_file(self, path: str, fd: int | os.PathLike, download_password: str = None): """ Download a file. :param path: The file path in Opal - :param outfile: The destination file object opened in 'wb' mode + :param fd: The file descriptor or path to the destination file :param download_password: The password to use to encrypt the downloaded zip archive """ @@ -98,9 +98,10 @@ def download_file(self, path: str, outfile: os.PathLike, download_password: str request.verbose() file = FileService.OpalFile(path) + fp = os.fdopen(fd, "wb") if isinstance(fd, int) else fd - request.get().resource(file.get_ws()).accept("*/*").header("X-File-Key", download_password).send(outfile) - outfile.flush() + request.get().resource(file.get_ws()).accept("*/*").header("X-File-Key", download_password).send(fp) + fp.flush() def upload_file(self, upload: str, path: str): """ diff --git a/obiba_opal/system.py b/obiba_opal/system.py index 3c93a72..c95bae5 100644 --- a/obiba_opal/system.py +++ b/obiba_opal/system.py @@ -31,7 +31,7 @@ def add_arguments(cls, parser): "--env", action="store_true", required=False, - help="Opal java execution environment (JVM related statistic properties", + help="Opal java execution environment (JVM related statistic properties)", ) parser.add_argument( "--status", diff --git a/tests/test_file.py b/tests/test_file.py index 8e7746d..d59a9b8 100644 --- a/tests/test_file.py +++ b/tests/test_file.py @@ -24,7 +24,7 @@ def setup_class(cls): def test_1_fileUpload(self): try: - print(f"Uploading file to {self.TEST_FILE}...") + # print(f"Uploading file to {self.TEST_FILE}...") shutil.copyfile("./tests/resources/data.csv", self.LOCAL_UPLOAD_FILE) try: self.service.upload_file(self.LOCAL_UPLOAD_FILE, "/tmp") @@ -43,7 +43,7 @@ def test_1_fileUpload(self): def test_2_fileDownload(self): try: - print(f"Downloading file to {self.TEST_FILE}...") + # print(f"Downloading file to {self.TEST_FILE}...") # New: pythonic way with open(self.TEST_FILE, "wb") as outfile: self.service.download_file(self.TEST_FILE, outfile) @@ -59,7 +59,7 @@ def test_2_fileDownload(self): def test_3_fileDownloadWithPassword(self): try: - print(f"Downloading file with password to {self.TEST_ZIPPED_FILE}...") + # print(f"Downloading file with password to {self.TEST_ZIPPED_FILE}...") # New: pythonic way with open(self.TEST_ZIPPED_FILE, "wb") as outfile: self.service.download_file(self.TEST_FILE, outfile, "12345678") @@ -78,7 +78,7 @@ def test_3_fileDownloadWithPassword(self): def test_4_deleteUpload(self): try: - print(f"Deleting file {self.TEST_FILE}...") + # print(f"Deleting file {self.TEST_FILE}...") self.service.delete_file(self.TEST_FILE) self.service.file_info(self.TEST_FILE) except HTTPError as e: diff --git a/tests/utils.py b/tests/utils.py index 75c6868..207b163 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -7,5 +7,5 @@ def make_client(): - print(f"Creating OpalClient for server {TEST_SERVER} with user {TEST_USER}...") + # print(f"Creating OpalClient for server {TEST_SERVER} with user {TEST_USER}...") return OpalClient.buildWithAuthentication(server=TEST_SERVER, user=TEST_USER, password=TEST_PASSWORD)