Compare commits

...

10 Commits

7 changed files with 20 additions and 24 deletions

View File

@@ -12,7 +12,7 @@
// "forwardPorts": [],
// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": "pip3 install --user -r requirements.txt",
"postCreateCommand": "pip3 install --user -r requirements.txt && pip install -e .",
"customizations": {
"vscode": {
"extensions": [

.gitignore (1 changed line)

@@ -212,3 +212,4 @@ pyrightconfig.json
# Custom rules (everything added below won't be overriden by 'Generate .gitignore File' if you use 'Update' option)
+out.csv

Jenkinsfile (6 changed lines)

@@ -33,10 +33,10 @@ pipeline {
parallel {
stage ("git.reslate.systems") {
environment {
-TOKEN = credentials('git.reslate.systems')
+CREDS = credentials('username-password-rs-git')
}
steps {
-sh returnStatus: true, script: 'python -m twine upload --repository-url https://git.reslate.systems/api/packages/ydeng/pypi -u __token__ -p ${TOKEN} --non-interactive --disable-progress-bar --verbose dist/*'
+sh script: 'python -m twine upload --repository-url https://git.reslate.systems/api/packages/ydeng/pypi -u ${CREDS_USR} -p ${CREDS_PSW} --non-interactive --disable-progress-bar --verbose dist/*'
}
}
stage ("pypi.org") {
@@ -47,7 +47,7 @@ pipeline {
TOKEN = credentials('pypi.org')
}
steps {
-sh returnStatus: true, script: 'python -m twine upload -u __token__ -p ${TOKEN} --non-interactive --disable-progress-bar --verbose dist/*'
+sh script: 'python -m twine upload -u __token__ -p ${TOKEN} --non-interactive --disable-progress-bar --verbose dist/*'
}
}
}

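The credential switch above relies on standard Jenkins behaviour: binding a username/password credential to CREDS exposes CREDS_USR and CREDS_PSW as environment variables inside the stage. As a rough local equivalent of that upload step (a sketch only; it assumes twine is installed and that you have exported the two variables yourself), the same command can be driven from Python:

# Sketch: mirror the pipeline's twine upload outside Jenkins.
# CREDS_USR / CREDS_PSW are assumed to be set manually here; Jenkins sets them automatically.
import glob
import os
import subprocess

subprocess.run(
    [
        "python", "-m", "twine", "upload",
        "--repository-url", "https://git.reslate.systems/api/packages/ydeng/pypi",
        "-u", os.environ["CREDS_USR"],
        "-p", os.environ["CREDS_PSW"],
        "--non-interactive", "--disable-progress-bar", "--verbose",
        *glob.glob("dist/*"),  # expand the glob ourselves since no shell is involved
    ],
    check=True,  # unlike the dropped returnStatus: true, this fails loudly if the upload fails
)
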
pyproject.toml

@@ -6,9 +6,9 @@ build-backend = "setuptools.build_meta"
name = "autoBIGS.cli"
dynamic = ["version"]
readme = "README.md"
license = {file = "LICENSE"}
dependencies = [
"autoBIGS-engine"
"autoBIGS-engine==0.12.*"
]
requires-python = ">=3.12"
description = "A CLI tool to rapidly fetch MLST profiles given sequences for various diseases."

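The dependency pin above narrows autoBIGS-engine to the 0.12 series. A quick way to check what a "==0.12.*" specifier accepts (illustrative only; the packaging library is not a dependency of this project):

from packaging.specifiers import SpecifierSet  # assumed available; for illustration only

spec = SpecifierSet("==0.12.*")
print("0.12.5" in spec)  # True: any 0.12.x release satisfies the pin
print("0.13.0" in spec)  # False: releases outside 0.12.x are rejected
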
View File

@@ -1,6 +1,6 @@
from argparse import ArgumentParser, Namespace
import asyncio
-from autobigs.engine.data.remote.databases.bigsdb import BIGSdbIndex
+from autobigs.engine.analysis.bigsdb import BIGSdbIndex
def setup_parser(parser: ArgumentParser):
parser.description = "Fetches the latest BIGSdb MLST database definitions."
@@ -31,15 +31,17 @@ async def run(args: Namespace):
async with BIGSdbIndex() as bigsdb_index:
if args.list_dbs:
known_seqdef_dbs = await bigsdb_index.get_known_seqdef_dbs(force=False)
print("\n".join(known_seqdef_dbs.keys()))
print("The following are all known BIGS database names (sorted alphabetically):")
print("\n".join(sorted(known_seqdef_dbs.keys())))
for bigsdb_schema_name in args.list_bigsdb_schemas:
schemas = await bigsdb_index.get_schemas_for_seqdefdb(bigsdb_schema_name)
print("The following are the known schemas for \"{0}\", and their associated IDs:".format(bigsdb_schema_name))
for schema_desc, schema_id in schemas.items():
print(f"{schema_desc}: {schema_id}")
if not (args.list_dbs or len(args.list_bigsdb_schemas) > 0):
print("Nothing to do. Try specifying \"-l\".")
print("Nothing to do. Try specifying \"-l\" for a list of known databases, or \"-h\" for more information.")
def run_asynchronously(args: Namespace):
asyncio.run(run(args))

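For reference, the updated "-l" listing behaviour can be reproduced directly against the engine; a minimal sketch using only the calls visible in this diff (BIGSdbIndex as an async context manager and get_known_seqdef_dbs returning a dict keyed by database name):

import asyncio
from autobigs.engine.analysis.bigsdb import BIGSdbIndex  # new module path used above

async def list_known_dbs():
    async with BIGSdbIndex() as bigsdb_index:
        known_seqdef_dbs = await bigsdb_index.get_known_seqdef_dbs(force=False)
        # Sorted output, matching the new behaviour of the "-l" flag.
        print("\n".join(sorted(known_seqdef_dbs.keys())))

asyncio.run(list_known_dbs())
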
View File

@@ -33,6 +33,8 @@ def run():
metadata.version("autoBIGS-engine")}.')
if hasattr(args, "run"):
args.run(args)
+elif not args.version:
+root_parser.print_usage()
if __name__ == "__main__":

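The two added lines above give the root parser a usage fallback when no subcommand is selected. A standalone sketch of the same pattern (the parser name and prog string are illustrative, not the project's exact setup):

from argparse import ArgumentParser

root_parser = ArgumentParser(prog="autobigs")
root_parser.add_argument("--version", action="store_true", default=False)
args = root_parser.parse_args([])  # simulate running with no arguments

if hasattr(args, "run"):
    # Subcommands are expected to attach their entry point, e.g. via set_defaults(run=...).
    args.run(args)
elif not args.version:
    # Neither a subcommand nor --version was given: show usage instead of exiting silently.
    root_parser.print_usage()
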
View File

@@ -2,9 +2,9 @@
from argparse import ArgumentParser, Namespace
import asyncio
import datetime
-from autobigs.engine.data.local.csv import write_mlst_profiles_as_csv
-from autobigs.engine.data.local.fasta import read_multiple_fastas
-from autobigs.engine.data.remote.databases.bigsdb import BIGSdbIndex
+from autobigs.engine.writing import write_mlst_profiles_as_csv
+from autobigs.engine.reading import read_multiple_fastas
+from autobigs.engine.analysis.bigsdb import BIGSdbIndex
def setup_parser(parser: ArgumentParser):
@@ -35,15 +35,6 @@ def setup_parser(parser: ArgumentParser):
help="The output CSV name (.csv will be appended)."
)
-parser.add_argument(
-"--exact", "-ex",
-action="store_true",
-dest="exact",
-required=False,
-default=False,
-help="Should run exact matching rather than returning all similar ones"
-)
parser.add_argument(
"--stop-on-fail", "-sof",
action="store_true",
@@ -58,11 +49,11 @@ async def run(args: Namespace):
async def run(args: Namespace):
async with BIGSdbIndex() as bigsdb_index:
gen_strings = read_multiple_fastas(args.fastas)
-async with await bigsdb_index.build_profiler_from_seqdefdb(args.seqdefdb, args.schema) as mlst_profiler:
-mlst_profiles = mlst_profiler.profile_multiple_strings(gen_strings, exact=args.exact)
+async with await bigsdb_index.build_profiler_from_seqdefdb(False, args.seqdefdb, args.schema) as mlst_profiler:
+mlst_profiles = mlst_profiler.profile_multiple_strings(gen_strings)
failed = await write_mlst_profiles_as_csv(mlst_profiles, args.out)
if len(failed) > 0:
print(f"A total of {len(failed)} IDs failed:\n{"\n".join(failed)}")
print(f"A total of {len(failed)} IDs failed (no profile found):\n{"\n".join(failed)}")
print(f"Completed fetching MLSTs for {len(args.fastas)} sequences.")
def run_asynchronously(args):
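For context on the signature changes above (the extra leading argument to build_profiler_from_seqdefdb and the removal of the exact= keyword), this is roughly how the updated call chain fits together outside argparse; a sketch that assumes only the functions imported in this diff, with placeholder inputs:

import asyncio
from autobigs.engine.writing import write_mlst_profiles_as_csv
from autobigs.engine.reading import read_multiple_fastas
from autobigs.engine.analysis.bigsdb import BIGSdbIndex

async def profile_fastas_to_csv(fastas, seqdefdb, schema_id, out_path):
    gen_strings = read_multiple_fastas(fastas)
    async with BIGSdbIndex() as bigsdb_index:
        # The leading False mirrors the new first argument introduced in this compare.
        async with await bigsdb_index.build_profiler_from_seqdefdb(False, seqdefdb, schema_id) as mlst_profiler:
            mlst_profiles = mlst_profiler.profile_multiple_strings(gen_strings)
            return await write_mlst_profiles_as_csv(mlst_profiles, out_path)

# Placeholder database name, schema ID, and paths for illustration only.
failed = asyncio.run(profile_fastas_to_csv(["example.fasta"], "some_seqdef_db", 1, "out.csv"))
if len(failed) > 0:
    print(f"A total of {len(failed)} IDs failed (no profile found)")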