Compare commits

Comparing `0.4.4` ... `19b23539b3` (7 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 19b23539b3 | |
| | dbd8238cef | |
| | 4b0fac0801 | |
| | d78ae19c4f | |
| | 6b8376c470 | |
| | a4d8de7cc6 | |
| | 5ef5b6ac08 | |
.devcontainer/Dockerfile (new file, 16 lines)

```diff
@@ -0,0 +1,16 @@
+FROM mcr.microsoft.com/devcontainers/miniconda:1-3
+
+# Copy environment.yml (if found) to a temp location so we update the environment. Also
+# copy "noop.txt" so the COPY instruction does not fail if no environment.yml exists.
+COPY environment.yml* .devcontainer/noop.txt /tmp/conda-tmp/
+RUN if [ -f "/tmp/conda-tmp/environment.yml" ]; then umask 0002 && /opt/conda/bin/conda env update -n base -f /tmp/conda-tmp/environment.yml; fi \
+    && rm -rf /tmp/conda-tmp
+
+# [Optional] Uncomment to install a different version of Python than the default
+# RUN conda install -y python=3.6 \
+#     && pip install --no-cache-dir pipx \
+#     && pipx reinstall-all
+
+# [Optional] Uncomment this section to install additional OS packages.
+# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
+#     && apt-get -y install --no-install-recommends <your-package-list-here>
```
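The COPY glob plus `noop.txt` ensures the build never fails when no environment.yml exists, and the RUN only updates the base environment if the file actually landed in `/tmp/conda-tmp`. The same guard, sketched in Python purely for clarity (paths taken from the Dockerfile; the sketch itself is illustrative, not part of the change):

```python
from pathlib import Path
import subprocess

# Mirrors the Dockerfile's conditional: update the base conda env
# only if an environment.yml was actually copied into the image.
env_file = Path("/tmp/conda-tmp/environment.yml")
if env_file.exists():
    subprocess.run(
        ["/opt/conda/bin/conda", "env", "update", "-n", "base", "-f", str(env_file)],
        check=True,  # fail the build on a non-zero exit, as RUN would
    )
```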
.devcontainer/devcontainer.json

```diff
@@ -1,9 +1,11 @@
 // For format details, see https://aka.ms/devcontainer.json. For config options, see the
-// README at: https://github.com/devcontainers/templates/tree/main/src/python
+// README at: https://github.com/devcontainers/templates/tree/main/src/miniconda
 {
-    "name": "Python 3",
-    // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
-    "image": "mcr.microsoft.com/devcontainers/python:1-3.12-bullseye",
+    "name": "Miniconda (Python 3)",
+    "build": {
+        "context": "..",
+        "dockerfile": "Dockerfile"
+    },
 
     // Features to add to the dev container. More info: https://containers.dev/features.
     // "features": {},
@@ -12,7 +14,9 @@
     // "forwardPorts": [],
 
     // Use 'postCreateCommand' to run commands after the container is created.
-    "postCreateCommand": "pip3 install --user -r requirements.txt && pip install -e .",
+    "postCreateCommand": "pip install -e .",
+
+    // Configure tool-specific properties.
     "customizations": {
         "vscode": {
             "extensions": [
@@ -20,8 +24,6 @@
             ]
         }
     },
-    // Configure tool-specific properties.
-    // "customizations": {},
 
     // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
     // "remoteUser": "root"
```
.devcontainer/noop.txt (new file, 3 lines)

```diff
@@ -0,0 +1,3 @@
+This file is copied into the container along with environment.yml* from the
+parent folder. This is done to prevent the Dockerfile COPY instruction from
+failing if no environment.yml is found.
```
Jenkinsfile (vendored)

```diff
@@ -2,14 +2,16 @@ pipeline {
     agent {
         kubernetes {
             cloud 'rsys-devel'
-            defaultContainer 'pip'
-            inheritFrom 'pip'
+            defaultContainer 'miniforge3'
+            inheritFrom 'miniforge'
         }
     }
     stages {
         stage("install") {
             steps {
-                sh 'python -m pip install -r requirements.txt'
+                sh 'conda config --add channels bioconda'
+                sh 'conda config --add channels https://git.reslate.systems/api/packages/ydeng/conda'
+                sh 'conda env update -n base -f environment.yml'
             }
         }
         stage("unit tests") {
@@ -22,11 +24,14 @@ pipeline {
         stage("build") {
             steps {
                 sh "python -m build"
+                sh "grayskull pypi dist/*.tar.gz --maintainers 'Harrison Deng'"
+                sh "python scripts/patch_recipe.py"
+                sh 'conda build autobigs-cli -c bioconda --output-folder conda-bld --verify'
             }
         }
         stage("archive") {
             steps {
-                archiveArtifacts artifacts: 'dist/*.tar.gz, dist/*.whl', fingerprint: true, followSymlinks: false, onlyIfSuccessful: true
+                archiveArtifacts artifacts: 'dist/*.tar.gz, dist/*.whl, conda-bld/**/*.conda', fingerprint: true, followSymlinks: false, onlyIfSuccessful: true
             }
         }
         stage("publish") {
@@ -42,6 +47,7 @@ pipeline {
             }
             steps {
                 sh script: 'python -m twine upload --repository-url https://git.reslate.systems/api/packages/ydeng/pypi -u ${CREDS_USR} -p ${CREDS_PSW} --non-interactive --disable-progress-bar --verbose dist/*'
+                sh 'curl --user ${CREDS_USR}:${CREDS_PSW} --upload-file conda-bld/**/*.conda https://git.reslate.systems/api/packages/${CREDS_USR}/conda/$(basename conda-bld/**/*.conda)'
             }
         }
         stage ("pypi.org") {
```
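The added publish step pushes each built `.conda` archive to the Gitea conda package endpoint; `curl --upload-file` issues an HTTP PUT. A rough Python equivalent of that one shell line, as a minimal sketch (`requests` is not a dependency of this repo, and the credentials stand in for the Jenkins `${CREDS_*}` variables):

```python
import glob
from pathlib import Path

import requests  # assumed available; used here only for illustration

user, password = "ydeng", "<token>"  # ${CREDS_USR} / ${CREDS_PSW} in the pipeline
for pkg in glob.glob("conda-bld/**/*.conda", recursive=True):
    url = f"https://git.reslate.systems/api/packages/{user}/conda/{Path(pkg).name}"
    with open(pkg, "rb") as fh:
        resp = requests.put(url, data=fh, auth=(user, password))
    # Unlike the plain curl call, surface HTTP errors explicitly.
    resp.raise_for_status()
```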
environment.yml (new file, 14 lines)

```diff
@@ -0,0 +1,14 @@
+name: base
+channels:
+  - bioconda
+  - conda-forge
+dependencies:
+  - pytest
+  - pytest-asyncio
+  - pytest-cov
+  - python-build
+  - conda-build
+  - twine==6.0.1
+  - setuptools_scm
+  - grayskull
+  - curl
```
pyproject.toml

```diff
@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"
 name = "autoBIGS.cli"
 dynamic = ["version"]
 readme = "README.md"
-license = {file = "LICENSE"}
+license = {text = "GPL-3.0-or-later"}
 dependencies = [
     "autoBIGS-engine==0.12.*"
 ]
@@ -14,8 +14,8 @@ requires-python = ">=3.12"
 description = "A CLI tool to rapidly fetch fetch MLST profiles given sequences for various diseases."
 
 [project.urls]
-Repository = "https://github.com/RealYHD/autoBIGS.cli"
-Issues = "https://github.com/RealYHD/autoBIGS.cli/issues"
+Repository = "https://github.com/Syph-and-VPD-Lab/autoBIGS.cli"
+Issues = "https://github.com/Syph-and-VPD-Lab/autoBIGS.cli/issues"
 
 
 [project.scripts]
```
scripts/patch_recipe.py (new file, 103 lines)

```diff
@@ -0,0 +1,103 @@
+#!/usr/bin/env python3
+
+import argparse
+from os import fdopen, path
+import os
+import re
+import shutil
+from sys import argv
+import tempfile
+
+INDENTATION = "  "
+GRAYSKULL_OUTPUT_PATH = "autoBIGS.cli"
+RUN_EXPORTED_VALUE = r'{{ pin_subpackage( name|lower|replace(".", "-"), max_pin="x.x") }}'
+LICENSE_SUFFIX = "-or-later"
+HOME_PAGE = "https://github.com/Syph-and-VPD-Lab/autoBIGS.cli"
+
+def _calc_indentation(line: str):
+    return len(re.findall(INDENTATION, line.split(line.strip())[0])) if line != "\n" else 0
+
+def read_grayskull_output():
+    original_recipe = path.abspath(GRAYSKULL_OUTPUT_PATH)
+    original_meta = path.join(original_recipe, "meta.yaml")
+    meta_file = open(original_meta)
+    lines = meta_file.readlines()
+    meta_file.close()
+    return lines
+
+def update_naming_scheme(lines):
+    modified_lines = []
+    for line in lines:
+        matches = re.finditer(r"\{\{\s*name\|lower()\s+\}\}", line)
+        modified_line = line
+        for match in matches:
+            modified_line = modified_line[:match.start(1)] + r'|replace(".", "-")' + modified_line[match.end(1):]
+        modified_lines.append(modified_line)
+    return modified_lines
+
+def inject_run_exports(lines: list[str]):
+    package_indent = False
+    modified_lines = []
+    for line in lines:
+        indentation_count = _calc_indentation(line)
+        if line == "build:\n" and indentation_count == 0:
+            package_indent = True
+            modified_lines.append(line)
+        elif package_indent and indentation_count == 0:
+            modified_lines.append(INDENTATION*1 + "run_exports:\n")
+            modified_lines.append(INDENTATION*2 + "- " + RUN_EXPORTED_VALUE + "\n")
+            package_indent = False
+        else:
+            modified_lines.append(line)
+    return modified_lines
+
+def suffix_license(lines: list[str]):
+    about_indent = False
+    modified_lines = []
+    for line in lines:
+        indentation_count = _calc_indentation(line)
+        if line == "about:\n" and indentation_count == 0:
+            about_indent = True
+            modified_lines.append(line)
+        elif about_indent and indentation_count == 1 and line.lstrip().startswith("license:"):
+            modified_lines.append(line.rstrip() + LICENSE_SUFFIX + "\n")
+            about_indent = False
+        else:
+            modified_lines.append(line)
+    return modified_lines
+
+def inject_home_page(lines: list[str]):
+    about_indent = False
+    modified_lines = []
+    for line in lines:
+        indentation_count = _calc_indentation(line)
+        if line == "about:\n" and indentation_count == 0:
+            about_indent = True
+            modified_lines.append(line)
+        elif about_indent and indentation_count == 0:
+            modified_lines.append(INDENTATION + "home: " + HOME_PAGE + "\n")
+            about_indent = False
+        else:
+            modified_lines.append(line)
+    return modified_lines
+
+def write_to_original(lines: list[str]):
+    original_recipe = path.abspath(GRAYSKULL_OUTPUT_PATH)
+    original_meta = path.join(original_recipe, "meta.yaml")
+    with open(original_meta, "w") as file:
+        file.writelines(lines)
+
+def rename_recipe_dir():
+    new_recipe_name = path.abspath(path.join(GRAYSKULL_OUTPUT_PATH.replace(".", "-").lower()))
+    shutil.rmtree(new_recipe_name, ignore_errors=True)
+    os.replace(path.abspath(GRAYSKULL_OUTPUT_PATH), new_recipe_name)
+
+if __name__ == "__main__":
+    original_grayskull_out = read_grayskull_output()
+    modified_recipe_meta = None
+    modified_recipe_meta = update_naming_scheme(original_grayskull_out)
+    modified_recipe_meta = inject_run_exports(modified_recipe_meta)
+    modified_recipe_meta = suffix_license(modified_recipe_meta)
+    modified_recipe_meta = inject_home_page(modified_recipe_meta)
+    write_to_original(modified_recipe_meta)
+    rename_recipe_dir()
```
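This script post-processes the grayskull-generated meta.yaml: `update_naming_scheme` rewrites `{{ name|lower }}` templates so the dotted package name becomes dash-separated, `inject_run_exports` adds a `run_exports` pin under `build:`, `suffix_license` appends `-or-later` (evidently restoring the `GPL-3.0-or-later` now declared in pyproject.toml), `inject_home_page` adds the repository URL, and `rename_recipe_dir` renames the recipe folder to match the `conda build autobigs-cli` step in the Jenkinsfile. To make the regex splice concrete, a standalone demo of the same pattern (the sample input line is made up):

```python
import re

# Same pattern as update_naming_scheme: the empty group () marks the
# position right after "name|lower" where the |replace filter is spliced in.
line = '  name: {{ name|lower }}\n'  # hypothetical grayskull output
match = re.search(r"\{\{\s*name\|lower()\s+\}\}", line)
patched = line[:match.start(1)] + r'|replace(".", "-")' + line[match.end(1):]
print(patched)  # '  name: {{ name|lower|replace(".", "-") }}'
```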
(CLI module; file header not captured in the compare view)

```diff
@@ -27,7 +27,7 @@ def setup_parser(parser: ArgumentParser):
     )
 
     parser.add_argument(
-        "--csv-prefix", "-o",
+        "--csv", "-o",
         dest="csv_output",
         required=False,
         default=None,
@@ -39,18 +39,21 @@ def setup_parser(parser: ArgumentParser):
 
 async def run(args: Namespace):
     async with BIGSdbIndex() as bigsdb_index:
+        if args.list_dbs and len(args.list_bigsdb_schemas) > 0:
+            print("Cannot specify both database listing and schema listing, please choose one!")
+            exit(1)
+
         if args.list_dbs:
             known_seqdef_dbs = await bigsdb_index.get_known_seqdef_dbs(force=False)
             sorted_seqdef_dbs = [(name, source) for name, source in sorted(known_seqdef_dbs.items())]
             print("The following are all known BIGS database names, and their source (sorted alphabetically):")
             print("\n".join(["{0}: {1}".format(name, source) for name, source in sorted_seqdef_dbs]))
             if args.csv_output:
-                dbs_csv_path = path.splitext(args.csv_output)[0] + "_" + "dbs.csv"
-                with open(dbs_csv_path, "w") as csv_out_handle:
+                with open(args.csv_output, "w") as csv_out_handle:
                     writer = csv.writer(csv_out_handle)
                     writer.writerow(("BIGSdb Names", "Source"))
                     writer.writerows(sorted_seqdef_dbs)
-                print("\nDatabase output written to {0}".format(dbs_csv_path))
+                print("\nDatabase output written to {0}".format(args.csv_output))
 
         for bigsdb_schema_name in args.list_bigsdb_schemas:
             schemas = await bigsdb_index.get_schemas_for_seqdefdb(bigsdb_schema_name)
@@ -58,14 +61,15 @@ async def run(args: Namespace):
             print("The following are the known schemas for \"{0}\", and their associated IDs:".format(bigsdb_schema_name))
             print("\n".join(["{0}: {1}".format(name, id) for name, id in sorted_schemas]))
             if args.csv_output:
-                schema_csv_path = path.splitext(args.csv_output)[0] + "_" + "schemas.csv"
-                with open(schema_csv_path, "w") as csv_out_handle:
+                with open(args.csv_output, "w") as csv_out_handle:
                     writer = csv.writer(csv_out_handle)
                     writer.writerow(("Name", "ID"))
                     writer.writerows(sorted_schemas)
-                print("\nSchema list output written to {0}".format(schema_csv_path))
+                print("\nSchema list output written to {0}".format(args.csv_output))
 
         if not (args.list_dbs or len(args.list_bigsdb_schemas) > 0):
             print("Nothing to do. Try specifying \"-l\" for a list of known databases, or \"-h\" for more information.")
+            exit(1)
 
 def run_asynchronously(args: Namespace):
     asyncio.run(run(args))
```
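The `--csv-prefix` to `--csv` rename matches the behavior change in the hunks above: the old code derived separate `*_dbs.csv` / `*_schemas.csv` names from the prefix, while the new code writes to the given path verbatim and now rejects running both listings at once, presumably because they would overwrite the same file. A before/after sketch (the argument value is hypothetical):

```python
from os import path

csv_output = "results.csv"  # hypothetical value passed via -o

# Old behaviour (--csv-prefix): derive one file per listing type.
old_dbs_path = path.splitext(csv_output)[0] + "_" + "dbs.csv"          # "results_dbs.csv"
old_schemas_path = path.splitext(csv_output)[0] + "_" + "schemas.csv"  # "results_schemas.csv"

# New behaviour (--csv): write exactly where the user pointed.
new_path = csv_output  # "results.csv"
```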