Initial commit, copied from autoBIGS.engine-bioconda

Harrison Deng 2025-02-17 19:32:25 +00:00
commit 941ecb1b13
10 changed files with 281 additions and 0 deletions

16
.devcontainer/Dockerfile Normal file

@@ -0,0 +1,16 @@
FROM mcr.microsoft.com/devcontainers/miniconda:1-3
# Copy environment.yml (if found) to a temp location so we update the environment. Also
# copy "noop.txt" so the COPY instruction does not fail if no environment.yml exists.
COPY environment.yml* .devcontainer/noop.txt /tmp/conda-tmp/
RUN if [ -f "/tmp/conda-tmp/environment.yml" ]; then umask 0002 && /opt/conda/bin/conda env update -n base -f /tmp/conda-tmp/environment.yml; fi \
&& rm -rf /tmp/conda-tmp
# [Optional] Uncomment to install a different version of Python than the default
# RUN conda install -y python=3.6 \
# && pip install --no-cache-dir pipx \
# && pipx reinstall-all
# [Optional] Uncomment this section to install additional OS packages.
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
# && apt-get -y install --no-install-recommends <your-package-list-here>

24
.devcontainer/devcontainer.json Normal file

@@ -0,0 +1,24 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/miniconda
{
    "name": "Miniconda (Python 3)",
    "build": {
        "context": "..",
        "dockerfile": "Dockerfile"
    }

    // Features to add to the dev container. More info: https://containers.dev/features.
    // "features": {},

    // Use 'forwardPorts' to make a list of ports inside the container available locally.
    // "forwardPorts": [],

    // Use 'postCreateCommand' to run commands after the container is created.
    // "postCreateCommand": "python --version",

    // Configure tool-specific properties.
    // "customizations": {},

    // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
    // "remoteUser": "root"
}

3
.devcontainer/noop.txt Normal file

@@ -0,0 +1,3 @@
This file is copied into the container along with environment.yml* from the
parent folder. This is done to prevent the Dockerfile COPY instruction from
failing if no environment.yml is found.

19
.vscode/launch.json vendored Normal file

@@ -0,0 +1,19 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Python Debugger: Current File with Arguments",
            "type": "debugpy",
            "request": "launch",
            "program": "${file}",
            "console": "integratedTerminal",
            "args": [
                "${command:pickArgs}"
            ]
        }
    ]
}

53
Jenkinsfile vendored Normal file

@@ -0,0 +1,53 @@
pipeline {
    agent {
        kubernetes {
            cloud 'rsys-devel'
            defaultContainer 'miniforge3'
            inheritFrom 'miniforge'
        }
    }
    stages {
        stage("prepare") {
            parallel {
                stage("recipes repo") {
                    steps {
                        sh 'git clone https://github.com/Syph-and-VPD-Lab/bioconda-recipes.git'
                        dir('bioconda-recipes') {
                            sh 'git pull'
                            sh 'git pull origin update-autobigs-cli'
                            sh 'git checkout update-autobigs-cli'
                        }
                    }
                }
                stage("conda") {
                    steps {
                        sh 'conda env update -n base --file environment.yml'
                    }
                }
            }
        }
        stage("generate recipe") {
            steps {
                sh 'grayskull pypi autobigs.cli --maintainers "Harrison Deng"'
                sh 'python scripts/adapt_names.py autobigs.cli'
            }
        }
        stage("build") {
            steps {
                sh 'conda build autobigs-cli'
            }
        }
        stage("commit") {
            environment {
                TOKEN = credentials('github.com')
            }
            steps {
                dir('bioconda-recipes') {
                    sh 'cp -r ../autobigs-cli/* recipes/autobigs-cli/.'
                    sh 'git commit -a -m "Automatically updated autobigs-cli bioconda recipe to $(python ../scripts/package_latest_version.py autoBIGS.cli)."'
                    sh 'git push https://${TOKEN}@github.com/Syph-and-VPD-Lab/bioconda-recipes.git update-autobigs-cli'
                }
            }
        }
    }
}

50
autobigs-cli/meta.yaml Normal file

@@ -0,0 +1,50 @@
{% set name = "autoBIGS.cli" %}
{% set version = "0.4.2" %}

package:
  name: {{ name|lower|replace(".", "-") }}
  version: {{ version }}

source:
  url: https://pypi.org/packages/source/{{ name[0] }}/{{ name }}/autobigs_cli-{{ version }}.tar.gz
  sha256: fc7bf6c604974796f5c8aae52fd9ab9924f5490f4f207a26be803217aae30c82

build:
  entry_points:
    - autoBIGS = autobigs.cli.program:run
  noarch: python
  script: {{ PYTHON }} -m pip install . -vv --no-deps --no-build-isolation
  number: 0
  run_exports:
    - {{ pin_subpackage( name|lower|replace(".", "-"), max_pin="x.x") }}

requirements:
  host:
    - python >=3.12
    - setuptools >=64
    - setuptools-scm >=8
    - pip
  run:
    - python >=3.12
    - autobigs-engine

test:
  imports:
    - autobigs
  commands:
    - pip check
    - autoBIGS --help
    - autoBIGS info -h
    - autoBIGS st -h
  requires:
    - pip

about:
  home: https://github.com/RealYHD/autoBIGS.cli
  summary: A CLI tool to rapidly fetch MLST profiles given sequences for various diseases.
  license: GPL-3.0-or-later
  license_file: LICENSE

extra:
  recipe-maintainers:
    - Harrison Deng

44
autobigs-engine/meta.yaml Normal file

@@ -0,0 +1,44 @@
{% set name = "autobigs.engine" %}
{% set version = "0.11.0" %}

package:
  name: {{ name|lower|replace(".", "-") }}
  version: {{ version }}

source:
  url: https://pypi.org/packages/source/{{ name[0] }}/{{ name }}/autobigs_engine-{{ version }}.tar.gz
  sha256: d479b0bce5fcf2c3eb2e666b780b55d0c8a2c03293c9200db9eeac74e869f47b

build:
  noarch: python
  script: {{ PYTHON }} -m pip install . -vv --no-deps --no-build-isolation
  number: 0

requirements:
  host:
    - python >=3.12
    - setuptools >=64
    - setuptools-scm >=8
    - pip
  run:
    - python >=3.12
    - biopython ==1.85
    - aiohttp ==3.11.*

test:
  imports:
    - autobigs
  commands:
    - pip check
  requires:
    - pip

about:
  summary: A library to rapidly fetch MLST profiles given sequences for various diseases.
  dev_url: https://github.com/RealYHD/autoBIGS.engine
  license: GPL-3.0
  license_file: LICENSE

extra:
  recipe-maintainers:
    - RealYHD

10
environment.yml Normal file

@@ -0,0 +1,10 @@
name: base
channels:
- conda-forge
- bioconda
dependencies:
- conda-build
- grayskull
- bioconda-utils
- python=3.10
- git

32
scripts/adapt_names.py Normal file

@@ -0,0 +1,32 @@
#!/usr/bin/env python3
import argparse
from os import fdopen, path
import os
import re
import shutil
from sys import argv
import tempfile


def update_naming_scheme(recipe_path):
    original_recipe = path.abspath(recipe_path)
    original_meta = path.join(original_recipe, "meta.yaml")
    new_fd, new_file_path = tempfile.mkstemp()
    with fdopen(new_fd, "w") as new_file_handle:
        with open(original_meta, "r") as original_file_handle:
            for line in original_file_handle:
                # Rewrite "{{ name|lower }}" as "{{ name|lower|replace(".", "-") }}" so the
                # conda package name uses dashes instead of dots.
                matches = re.finditer(r"\{\{\s*name\|lower()\s+\}\}", line)
                modified_line = line
                for match in matches:
                    modified_line = modified_line[:match.start(1)] + r'|replace(".", "-")' + modified_line[match.end(1):]
                new_file_handle.write(modified_line)
    # Replace the original meta.yaml with the rewritten copy.
    shutil.move(new_file_path, original_meta)
    # Rename the recipe directory itself to the lowercase, dash-separated form.
    new_recipe_name = path.join(
        path.dirname(original_recipe),
        path.basename(original_recipe).replace(".", "-").lower())
    shutil.rmtree(new_recipe_name, ignore_errors=True)
    os.replace(original_recipe, new_recipe_name)


if __name__ == "__main__":
    update_naming_scheme(argv[1])

30
scripts/package_latest_version.py Normal file

@@ -0,0 +1,30 @@
#!/usr/bin/env python3

import sys
import requests
import json

try:
    from packaging.version import parse
except ImportError:
    from pip._vendor.packaging.version import parse

URL_PATTERN = 'https://pypi.python.org/pypi/{package}/json'


def get_version(package, url_pattern=URL_PATTERN):
    """Return version of package on pypi.python.org using json."""
    req = requests.get(url_pattern.format(package=package))
    version = parse('0')
    if req.status_code == requests.codes.ok:
        j = json.loads(req.text.encode(req.encoding or "utf-8"))
        releases = j.get('releases', [])
        for release in releases:
            ver = parse(release)
            if not ver.is_prerelease:
                version = max(version, ver)
    return version


if __name__ == '__main__':
    print(get_version(sys.argv[1]), end="")