Compare commits

..

No commits in common. "e2f19acd5a9a95fc8303769f4b928fdadfa36d85" and "2dfac7c17fd345737efed1a75a3e19a90a7618c8" have entirely different histories.

7 changed files with 24 additions and 20 deletions

View File

@@ -12,7 +12,7 @@
// "forwardPorts": [], // "forwardPorts": [],
// Use 'postCreateCommand' to run commands after the container is created. // Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": "pip3 install --user -r requirements.txt && pip install -e .", "postCreateCommand": "pip3 install --user -r requirements.txt",
"customizations": { "customizations": {
"vscode": { "vscode": {
"extensions": [ "extensions": [

1
.gitignore vendored
View File

@@ -212,4 +212,3 @@ pyrightconfig.json
# Custom rules (everything added below won't be overriden by 'Generate .gitignore File' if you use 'Update' option) # Custom rules (everything added below won't be overriden by 'Generate .gitignore File' if you use 'Update' option)
out.csv

6
Jenkinsfile vendored
View File

@@ -33,10 +33,10 @@ pipeline {
parallel { parallel {
stage ("git.reslate.systems") { stage ("git.reslate.systems") {
environment { environment {
CREDS = credentials('username-password-rs-git') TOKEN = credentials('git.reslate.systems')
} }
steps { steps {
sh script: 'python -m twine upload --repository-url https://git.reslate.systems/api/packages/ydeng/pypi -u ${CREDS_USR} -p ${CREDS_PSW} --non-interactive --disable-progress-bar --verbose dist/*' sh returnStatus: true, script: 'python -m twine upload --repository-url https://git.reslate.systems/api/packages/ydeng/pypi -u __token__ -p ${TOKEN} --non-interactive --disable-progress-bar --verbose dist/*'
} }
} }
stage ("pypi.org") { stage ("pypi.org") {
@@ -47,7 +47,7 @@ pipeline {
TOKEN = credentials('pypi.org') TOKEN = credentials('pypi.org')
} }
steps { steps {
sh script: 'python -m twine upload -u __token__ -p ${TOKEN} --non-interactive --disable-progress-bar --verbose dist/*' sh returnStatus: true, script: 'python -m twine upload -u __token__ -p ${TOKEN} --non-interactive --disable-progress-bar --verbose dist/*'
} }
} }
} }

View File

@@ -6,9 +6,9 @@ build-backend = "setuptools.build_meta"
name = "autoBIGS.cli" name = "autoBIGS.cli"
dynamic = ["version"] dynamic = ["version"]
readme = "README.md" readme = "README.md"
license = {file = "LICENSE"}
dependencies = [ dependencies = [
"autoBIGS-engine==0.12.*" "autoBIGS-engine"
] ]
requires-python = ">=3.12" requires-python = ">=3.12"
description = "A CLI tool to rapidly fetch fetch MLST profiles given sequences for various diseases." description = "A CLI tool to rapidly fetch fetch MLST profiles given sequences for various diseases."

View File

@@ -1,6 +1,6 @@
from argparse import ArgumentParser, Namespace from argparse import ArgumentParser, Namespace
import asyncio import asyncio
from autobigs.engine.analysis.bigsdb import BIGSdbIndex from autobigs.engine.data.remote.databases.bigsdb import BIGSdbIndex
def setup_parser(parser: ArgumentParser): def setup_parser(parser: ArgumentParser):
parser.description = "Fetches the latest BIGSdb MLST database definitions." parser.description = "Fetches the latest BIGSdb MLST database definitions."
@@ -31,17 +31,15 @@ async def run(args: Namespace):
async with BIGSdbIndex() as bigsdb_index: async with BIGSdbIndex() as bigsdb_index:
if args.list_dbs: if args.list_dbs:
known_seqdef_dbs = await bigsdb_index.get_known_seqdef_dbs(force=False) known_seqdef_dbs = await bigsdb_index.get_known_seqdef_dbs(force=False)
print("The following are all known BIGS database names (sorted alphabetically):") print("\n".join(known_seqdef_dbs.keys()))
print("\n".join(sorted(known_seqdef_dbs.keys())))
for bigsdb_schema_name in args.list_bigsdb_schemas: for bigsdb_schema_name in args.list_bigsdb_schemas:
schemas = await bigsdb_index.get_schemas_for_seqdefdb(bigsdb_schema_name) schemas = await bigsdb_index.get_schemas_for_seqdefdb(bigsdb_schema_name)
print("The following are the known schemas for \"{0}\", and their associated IDs:".format(bigsdb_schema_name))
for schema_desc, schema_id in schemas.items(): for schema_desc, schema_id in schemas.items():
print(f"{schema_desc}: {schema_id}") print(f"{schema_desc}: {schema_id}")
if not (args.list_dbs or len(args.list_bigsdb_schemas) > 0): if not (args.list_dbs or len(args.list_bigsdb_schemas) > 0):
print("Nothing to do. Try specifying \"-l\" for a list of known databases, or \"-h\" for more information.") print("Nothing to do. Try specifying \"-l\".")
def run_asynchronously(args: Namespace): def run_asynchronously(args: Namespace):
asyncio.run(run(args)) asyncio.run(run(args))

View File

@@ -33,8 +33,6 @@ def run():
metadata.version("autoBIGS-engine")}.') metadata.version("autoBIGS-engine")}.')
if hasattr(args, "run"): if hasattr(args, "run"):
args.run(args) args.run(args)
elif not args.version:
root_parser.print_usage()
if __name__ == "__main__": if __name__ == "__main__":

View File

@@ -2,9 +2,9 @@
from argparse import ArgumentParser, Namespace from argparse import ArgumentParser, Namespace
import asyncio import asyncio
import datetime import datetime
from autobigs.engine.writing import write_mlst_profiles_as_csv from autobigs.engine.data.local.csv import write_mlst_profiles_as_csv
from autobigs.engine.reading import read_multiple_fastas from autobigs.engine.data.local.fasta import read_multiple_fastas
from autobigs.engine.analysis.bigsdb import BIGSdbIndex from autobigs.engine.data.remote.databases.bigsdb import BIGSdbIndex
def setup_parser(parser: ArgumentParser): def setup_parser(parser: ArgumentParser):
@@ -35,6 +35,15 @@ def setup_parser(parser: ArgumentParser):
help="The output CSV name (.csv will be appended)." help="The output CSV name (.csv will be appended)."
) )
parser.add_argument(
"--exact", "-ex",
action="store_true",
dest="exact",
required=False,
default=False,
help="Should run exact matching rather than returning all similar ones"
)
parser.add_argument( parser.add_argument(
"--stop-on-fail", "-sof", "--stop-on-fail", "-sof",
action="store_true", action="store_true",
@@ -49,11 +58,11 @@ def setup_parser(parser: ArgumentParser):
async def run(args: Namespace): async def run(args: Namespace):
async with BIGSdbIndex() as bigsdb_index: async with BIGSdbIndex() as bigsdb_index:
gen_strings = read_multiple_fastas(args.fastas) gen_strings = read_multiple_fastas(args.fastas)
async with await bigsdb_index.build_profiler_from_seqdefdb(False, args.seqdefdb, args.schema) as mlst_profiler: async with await bigsdb_index.build_profiler_from_seqdefdb(args.seqdefdb, args.schema) as mlst_profiler:
mlst_profiles = mlst_profiler.profile_multiple_strings(gen_strings) mlst_profiles = mlst_profiler.profile_multiple_strings(gen_strings, exact=args.exact)
failed = await write_mlst_profiles_as_csv(mlst_profiles, args.out) failed = await write_mlst_profiles_as_csv(mlst_profiles, args.out)
if len(failed) > 0: if len(failed) > 0:
print(f"A total of {len(failed)} IDs failed (no profile found):\n{"\n".join(failed)}") print(f"A total of {len(failed)} IDs failed:\n{"\n".join(failed)}")
print(f"Completed fetching MLSTs for {len(args.fastas)} sequences.") print(f"Completed fetching MLSTs for {len(args.fastas)} sequences.")
def run_asynchronously(args): def run_asynchronously(args):