7 Commits

19b23539b3  Added bioconda and personal conda repos to channels  (2025-02-26 15:22:32 +00:00)
            automlst.cli/pipeline/head: checks reported errors (build problem)
dbd8238cef  Added recipe patching script  (2025-02-21 14:14:02 +00:00)
            automlst.cli/pipeline/head: build failed
4b0fac0801  Added grayskull and curl to environment.yml  (2025-02-21 06:51:20 +00:00)
            automlst.cli/pipeline/head: build failed
d78ae19c4f  Re-added pytest-cov to conda environment.yml  (2025-02-21 06:40:20 +00:00)
            automlst.cli/pipeline/head: build failed
6b8376c470  Added publishing to personal git repo  (2025-02-21 06:33:07 +00:00)
            automlst.cli/pipeline/head: build failed
a4d8de7cc6  Changing CSV argument to --csv or -o  (2025-02-19 19:57:15 +00:00)
            automlst.cli/pipeline/head: success; automlst.cli/pipeline/tag: success
5ef5b6ac08  Updated pyproject.toml to use license text and updated repo  (2025-02-19 16:26:59 +00:00)
            automlst.cli/pipeline/head: success
8 changed files with 169 additions and 21 deletions

.devcontainer/Dockerfile (new file, +16)

@@ -0,0 +1,16 @@
FROM mcr.microsoft.com/devcontainers/miniconda:1-3
# Copy environment.yml (if found) to a temp location so we update the environment. Also
# copy "noop.txt" so the COPY instruction does not fail if no environment.yml exists.
COPY environment.yml* .devcontainer/noop.txt /tmp/conda-tmp/
RUN if [ -f "/tmp/conda-tmp/environment.yml" ]; then umask 0002 && /opt/conda/bin/conda env update -n base -f /tmp/conda-tmp/environment.yml; fi \
&& rm -rf /tmp/conda-tmp
# [Optional] Uncomment to install a different version of Python than the default
# RUN conda install -y python=3.6 \
# && pip install --no-cache-dir pipx \
# && pipx reinstall-all
# [Optional] Uncomment this section to install additional OS packages.
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
# && apt-get -y install --no-install-recommends <your-package-list-here>

.devcontainer/devcontainer.json

@@ -1,9 +1,11 @@
 // For format details, see https://aka.ms/devcontainer.json. For config options, see the
-// README at: https://github.com/devcontainers/templates/tree/main/src/python
+// README at: https://github.com/devcontainers/templates/tree/main/src/miniconda
 {
-    "name": "Python 3",
-    // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
-    "image": "mcr.microsoft.com/devcontainers/python:1-3.12-bullseye",
+    "name": "Miniconda (Python 3)",
+    "build": {
+        "context": "..",
+        "dockerfile": "Dockerfile"
+    },
     // Features to add to the dev container. More info: https://containers.dev/features.
     // "features": {},
@@ -12,7 +14,9 @@
     // "forwardPorts": [],
     // Use 'postCreateCommand' to run commands after the container is created.
-    "postCreateCommand": "pip3 install --user -r requirements.txt && pip install -e .",
+    "postCreateCommand": "pip install -e .",
+    // Configure tool-specific properties.
     "customizations": {
         "vscode": {
             "extensions": [
@@ -20,8 +24,6 @@
             ]
         }
     },
-    // Configure tool-specific properties.
-    // "customizations": {},
     // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
     // "remoteUser": "root"

.devcontainer/noop.txt (new file, +3)

@@ -0,0 +1,3 @@
This file is copied into the container along with environment.yml* from the
parent folder. This is done to prevent the Dockerfile COPY instruction from
failing if no environment.yml is found.

Jenkinsfile (vendored, 14 lines changed)

@@ -2,14 +2,16 @@ pipeline {
     agent {
         kubernetes {
             cloud 'rsys-devel'
-            defaultContainer 'pip'
-            inheritFrom 'pip'
+            defaultContainer 'miniforge3'
+            inheritFrom 'miniforge'
         }
     }
     stages {
         stage("install") {
             steps {
-                sh 'python -m pip install -r requirements.txt'
+                sh 'conda config --add channels bioconda'
+                sh 'conda config --add channels https://git.reslate.systems/api/packages/ydeng/conda'
+                sh 'conda env update -n base -f environment.yml'
             }
         }
         stage("unit tests") {
@@ -22,11 +24,14 @@ pipeline {
         stage("build") {
             steps {
                 sh "python -m build"
+                sh "grayskull pypi dist/*.tar.gz --maintainers 'Harrison Deng'"
+                sh "python scripts/patch_recipe.py"
+                sh 'conda build autobigs-cli -c bioconda --output-folder conda-bld --verify'
             }
         }
         stage("archive") {
             steps {
-                archiveArtifacts artifacts: 'dist/*.tar.gz, dist/*.whl', fingerprint: true, followSymlinks: false, onlyIfSuccessful: true
+                archiveArtifacts artifacts: 'dist/*.tar.gz, dist/*.whl, conda-bld/**/*.conda', fingerprint: true, followSymlinks: false, onlyIfSuccessful: true
             }
         }
         stage("publish") {
@@ -42,6 +47,7 @@ pipeline {
                 }
                 steps {
                     sh script: 'python -m twine upload --repository-url https://git.reslate.systems/api/packages/ydeng/pypi -u ${CREDS_USR} -p ${CREDS_PSW} --non-interactive --disable-progress-bar --verbose dist/*'
+                    sh 'curl --user ${CREDS_USR}:${CREDS_PSW} --upload-file conda-bld/**/*.conda https://git.reslate.systems/api/packages/${CREDS_USR}/conda/$(basename conda-bld/**/*.conda)'
                 }
             }
             stage ("pypi.org") {

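For local debugging outside Jenkins, the build stage boils down to four commands. Below is a minimal Python sketch of the same sequence, assuming grayskull, python-build, and conda-build are installed (e.g. from environment.yml); the script name reproduce_build.py is hypothetical, not part of this commit.

# reproduce_build.py: re-run the Jenkins "build" stage locally (a sketch).
import glob
import subprocess

subprocess.run(["python", "-m", "build"], check=True)  # sdist + wheel into dist/
sdist = sorted(glob.glob("dist/*.tar.gz"))[-1]  # pick the newest sdist
# grayskull generates a conda recipe (autoBIGS.cli/meta.yaml) from the sdist.
subprocess.run(["grayskull", "pypi", sdist, "--maintainers", "Harrison Deng"], check=True)
# patch_recipe.py fixes naming, run_exports, license, and home page,
# then renames the recipe directory to autobigs-cli.
subprocess.run(["python", "scripts/patch_recipe.py"], check=True)
subprocess.run(["conda", "build", "autobigs-cli", "-c", "bioconda",
                "--output-folder", "conda-bld", "--verify"], check=True)
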
environment.yml (new file, +14)

@@ -0,0 +1,14 @@
name: base
channels:
- bioconda
- conda-forge
dependencies:
- pytest
- pytest-asyncio
- pytest-cov
- python-build
- conda-build
- twine==6.0.1
- setuptools_scm
- grayskull
- curl

pyproject.toml

@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"
 name = "autoBIGS.cli"
 dynamic = ["version"]
 readme = "README.md"
-license = {file = "LICENSE"}
+license = {text = "GPL-3.0-or-later"}
 dependencies = [
     "autoBIGS-engine==0.12.*"
 ]
@@ -14,8 +14,8 @@ requires-python = ">=3.12"
 description = "A CLI tool to rapidly fetch fetch MLST profiles given sequences for various diseases."
 [project.urls]
-Repository = "https://github.com/RealYHD/autoBIGS.cli"
-Issues = "https://github.com/RealYHD/autoBIGS.cli/issues"
+Repository = "https://github.com/Syph-and-VPD-Lab/autoBIGS.cli"
+Issues = "https://github.com/Syph-and-VPD-Lab/autoBIGS.cli/issues"
 [project.scripts]

scripts/patch_recipe.py (new file, +103)

@@ -0,0 +1,103 @@
#!/usr/bin/env python3

import argparse
from os import fdopen, path
import os
import re
import shutil
from sys import argv
import tempfile

INDENTATION = "  "
GRAYSKULL_OUTPUT_PATH = "autoBIGS.cli"
RUN_EXPORTED_VALUE = r'{{ pin_subpackage( name|lower|replace(".", "-"), max_pin="x.x") }}'
LICENSE_SUFFIX = "-or-later"
HOME_PAGE = "https://github.com/Syph-and-VPD-Lab/autoBIGS.cli"

def _calc_indentation(line: str):
    # Number of INDENTATION units (two spaces) preceding the line's content.
    return len(re.findall(INDENTATION, line.split(line.strip())[0])) if line != "\n" else 0

def read_grayskull_output():
    original_recipe = path.abspath(GRAYSKULL_OUTPUT_PATH)
    original_meta = path.join(original_recipe, "meta.yaml")
    meta_file = open(original_meta)
    lines = meta_file.readlines()
    meta_file.close()
    return lines

def update_naming_scheme(lines):
    # Rewrite "{{ name|lower }}" to "{{ name|lower|replace(".", "-") }}" so the
    # conda package name uses dashes rather than dots.
    modified_lines = []
    for line in lines:
        matches = re.finditer(r"\{\{\s*name\|lower()\s+\}\}", line)
        modified_line = line
        for match in matches:
            modified_line = modified_line[:match.start(1)] + r'|replace(".", "-")' + modified_line[match.end(1):]
        modified_lines.append(modified_line)
    return modified_lines

def inject_run_exports(lines: list[str]):
    # Add a run_exports section at the end of the top-level "build:" block.
    package_indent = False
    modified_lines = []
    for line in lines:
        indentation_count = _calc_indentation(line)
        if line == "build:\n" and indentation_count == 0:
            package_indent = True
            modified_lines.append(line)
        elif package_indent and indentation_count == 0:
            modified_lines.append(INDENTATION*1 + "run_exports:\n")
            modified_lines.append(INDENTATION*2 + "- " + RUN_EXPORTED_VALUE + "\n")
            modified_lines.append(line)  # keep the first line after the build block
            package_indent = False
        else:
            modified_lines.append(line)
    return modified_lines

def suffix_license(lines: list[str]):
    # Append "-or-later" to the license identifier in the "about:" block.
    about_indent = False
    modified_lines = []
    for line in lines:
        indentation_count = _calc_indentation(line)
        if line == "about:\n" and indentation_count == 0:
            about_indent = True
            modified_lines.append(line)
        elif about_indent and indentation_count == 1 and line.lstrip().startswith("license:"):
            modified_lines.append(line.rstrip() + LICENSE_SUFFIX + "\n")
            about_indent = False
        else:
            modified_lines.append(line)
    return modified_lines

def inject_home_page(lines: list[str]):
    # Add a "home:" entry at the end of the top-level "about:" block.
    about_indent = False
    modified_lines = []
    for line in lines:
        indentation_count = _calc_indentation(line)
        if line == "about:\n" and indentation_count == 0:
            about_indent = True
            modified_lines.append(line)
        elif about_indent and indentation_count == 0:
            modified_lines.append(INDENTATION + "home: " + HOME_PAGE + "\n")
            modified_lines.append(line)  # keep the first line after the about block
            about_indent = False
        else:
            modified_lines.append(line)
    return modified_lines

def write_to_original(lines: list[str]):
    original_recipe = path.abspath(GRAYSKULL_OUTPUT_PATH)
    original_meta = path.join(original_recipe, "meta.yaml")
    with open(original_meta, "w") as file:
        file.writelines(lines)

def rename_recipe_dir():
    new_recipe_name = path.abspath(path.join(GRAYSKULL_OUTPUT_PATH.replace(".", "-").lower()))
    shutil.rmtree(new_recipe_name, ignore_errors=True)
    os.replace(path.abspath(GRAYSKULL_OUTPUT_PATH), new_recipe_name)

if __name__ == "__main__":
    original_grayskull_out = read_grayskull_output()
    modified_recipe_meta = update_naming_scheme(original_grayskull_out)
    modified_recipe_meta = inject_run_exports(modified_recipe_meta)
    modified_recipe_meta = suffix_license(modified_recipe_meta)
    modified_recipe_meta = inject_home_page(modified_recipe_meta)
    write_to_original(modified_recipe_meta)
    rename_recipe_dir()
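
As a quick sanity check of the naming rewrite, a hypothetical snippet (the sample line mimics what grayskull emits and is not taken from a real meta.yaml; the script is importable because its entry point is guarded by __name__):

from patch_recipe import update_naming_scheme

sample = ["  - {{ name|lower }}\n"]  # hypothetical grayskull-style template line
assert update_naming_scheme(sample) == ['  - {{ name|lower|replace(".", "-") }}\n']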

(Python CLI source file; filename not shown in this view)

@@ -27,7 +27,7 @@ def setup_parser(parser: ArgumentParser):
     )
     parser.add_argument(
-        "--csv-prefix", "-o",
+        "--csv", "-o",
         dest="csv_output",
         required=False,
         default=None,
@@ -39,18 +39,21 @@ def setup_parser(parser: ArgumentParser):
 async def run(args: Namespace):
     async with BIGSdbIndex() as bigsdb_index:
+        if args.list_dbs and len(args.list_bigsdb_schemas) > 0:
+            print("Cannot specify both database listing and schema listing, please choose one!")
+            exit(1)
         if args.list_dbs:
             known_seqdef_dbs = await bigsdb_index.get_known_seqdef_dbs(force=False)
             sorted_seqdef_dbs = [(name, source) for name, source in sorted(known_seqdef_dbs.items())]
             print("The following are all known BIGS database names, and their source (sorted alphabetically):")
             print("\n".join(["{0}: {1}".format(name, source) for name, source in sorted_seqdef_dbs]))
             if args.csv_output:
-                dbs_csv_path = path.splitext(args.csv_output)[0] + "_" + "dbs.csv"
-                with open(dbs_csv_path, "w") as csv_out_handle:
+                with open(args.csv_output, "w") as csv_out_handle:
                     writer = csv.writer(csv_out_handle)
                     writer.writerow(("BIGSdb Names", "Source"))
                     writer.writerows(sorted_seqdef_dbs)
-                print("\nDatabase output written to {0}".format(dbs_csv_path))
+                print("\nDatabase output written to {0}".format(args.csv_output))
         for bigsdb_schema_name in args.list_bigsdb_schemas:
             schemas = await bigsdb_index.get_schemas_for_seqdefdb(bigsdb_schema_name)
@@ -58,14 +61,15 @@ async def run(args: Namespace):
             print("The following are the known schemas for \"{0}\", and their associated IDs:".format(bigsdb_schema_name))
             print("\n".join(["{0}: {1}".format(name, id) for name, id in sorted_schemas]))
             if args.csv_output:
-                schema_csv_path = path.splitext(args.csv_output)[0] + "_" + "schemas.csv"
-                with open(schema_csv_path, "w") as csv_out_handle:
+                with open(args.csv_output, "w") as csv_out_handle:
                     writer = csv.writer(csv_out_handle)
                     writer.writerow(("Name", "ID"))
                     writer.writerows(sorted_schemas)
-                print("\nSchema list output written to {0}".format(schema_csv_path))
+                print("\nSchema list output written to {0}".format(args.csv_output))
         if not (args.list_dbs or len(args.list_bigsdb_schemas) > 0):
             print("Nothing to do. Try specifying \"-l\" for a list of known databases, or \"-h\" for more information.")
             exit(1)

 def run_asynchronously(args: Namespace):
     asyncio.run(run(args))
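
With this change, --csv/-o names the output file directly instead of serving as a prefix from which _dbs.csv and _schemas.csv paths were derived. A minimal sketch of driving the handler programmatically (assumes this module's own imports are in scope; the Namespace fields match the parser above):

from argparse import Namespace

# Roughly equivalent to passing -l --csv dbs.csv on the command line
# (the exact console-script name is not shown in this diff).
args = Namespace(list_dbs=True, list_bigsdb_schemas=[], csv_output="dbs.csv")
run_asynchronously(args)  # prints the database list and writes dbs.csv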