Compare commits

...

10 Commits

Author SHA1 Message Date
21d060be6a Updated CI config to match server CI
All checks were successful
csvbyname/pipeline/head This commit looks good
2024-11-14 20:48:06 +00:00
be4d665301 Updated pipeline to take advantage of latest build container image
All checks were successful
ydeng/csvbyname/pipeline/head This commit looks good
2023-05-03 08:32:04 -05:00
9e59bc097c Fixed typo in argument help
All checks were successful
ydeng/csvbyname/pipeline/head This commit looks good
2023-04-23 15:28:10 -05:00
2edd8a2093 Bumped package version
All checks were successful
ydeng/csvbyname/pipeline/head This commit looks good
2023-04-23 14:56:48 -05:00
7a400457fe Fixed double line breaks in output on Windows
All checks were successful
ydeng/csvbyname/pipeline/head This commit looks good
2023-04-23 14:56:29 -05:00
59cfe486aa Bumped package version
All checks were successful
ydeng/csvbyname/pipeline/head This commit looks good
2023-04-23 14:44:03 -05:00
266a611fea Fixed inconsistent CSV writing function
All checks were successful
ydeng/csvbyname/pipeline/head This commit looks good
2023-04-23 14:42:44 -05:00
cb36b8adb3 Version bump
All checks were successful
ydeng/csvbyname/pipeline/head This commit looks good
2023-04-21 15:56:08 -05:00
ded60aa742 Added step to test if command is runnable
All checks were successful
ydeng/csvbyname/pipeline/head This commit looks good
2023-04-21 15:53:07 -05:00
adf734f3c1 Added feature to add basename column to output
All checks were successful
ydeng/csvbyname/pipeline/head This commit looks good
2023-04-21 15:52:20 -05:00
8 changed files with 69 additions and 26 deletions

View File

@@ -0,0 +1,22 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/python
{
"name": "Python 3",
// Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
"image": "mcr.microsoft.com/devcontainers/python:1-3.12-bullseye"
// Features to add to the dev container. More info: https://containers.dev/features.
// "features": {},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [],
// Use 'postCreateCommand' to run commands after the container is created.
// "postCreateCommand": "pip3 install --user -r requirements.txt",
// Configure tool-specific properties.
// "customizations": {},
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
// "remoteUser": "root"
}

5
.vscode/launch.json vendored
View File

@@ -8,19 +8,20 @@
             "name": "Use Test Resources",
             "type": "python",
             "request": "launch",
-            "program": "${workspaceFolder}/csvbyname/csvbyname.py",
             "console": "integratedTerminal",
             "args": [
                 "${workspaceFolder}/tests/resources",
                 "${workspaceFolder}/output.csv",
                 "-r",
+                "-n",
                 "-p",
                 "group_num:group(\\d)-\\w-\\d+\\.txt",
                 "group(\\d)-(?P<sect>\\w)-(?P<patid>\\d+)\\.txt",
                 "-V",
                 "DEBUG"
             ],
-            "justMyCode": true
+            "justMyCode": true,
+            "module": "csvbyname.cli"
         }
     ]
 }

23
Jenkinsfile vendored
View File

@@ -1,15 +1,15 @@
 pipeline {
-    agent any
+    agent {
+        kubernetes {
+            cloud 'rsys-devel'
+            defaultContainer 'pip'
+            inheritFrom 'pip'
+        }
+    }
     stages {
-        stage("clean") {
-            steps {
-                sh 'rm -rf ./dist/*'
-            }
-        }
         stage("install") {
             steps {
-                sh 'mamba env update --file environment.yml'
-                sh 'echo "mamba activate csvbyname" >> ~/.bashrc'
+                sh 'pip install -r requirements.txt'
             }
         }
         stage("build") {
@@ -17,14 +17,15 @@ pipeline {
                 sh "python -m build"
             }
         }
-        stage("test") {
+        stage("test installation") {
             steps {
                 sh "pip install dist/*.whl"
+                sh "csvbyname -h"
             }
         }
         stage("archive") {
             steps {
-                archiveArtifacts artifacts: 'dist/*.tar.gz, dist/*.whl'
+                archiveArtifacts artifacts: 'dist/*.tar.gz, dist/*.whl', fingerprint: true, followSymlinks: false, onlyIfSuccessful: true
             }
         }
         stage("publish package") {
@@ -32,7 +33,7 @@ pipeline {
                 branch '**/main'
             }
             steps {
-                withCredentials([usernamePassword(credentialsId: 'rs-git-package-registry-ydeng', passwordVariable: 'PASS', usernameVariable: 'USER')]) {
+                withCredentials([usernamePassword(credentialsId: '4d6f64be-d26d-4f95-8de3-b6a9b0beb311', passwordVariable: 'PASS', usernameVariable: 'USER')]) {
                     sh "python -m twine upload --repository-url https://git.reslate.systems/api/packages/${USER}/pypi -u ${USER} -p ${PASS} --non-interactive --disable-progress-bar --verbose dist/*"
                 }
             }

View File

@@ -13,7 +13,7 @@ def run(args):
         args.recursive,
         args.add_re_property,
     )
-    write_collected_to_csv(args.output, collected, pkeys)
+    write_collected_to_csv(args.output, collected, pkeys, args.output_basename)


 def main():
@@ -63,6 +63,17 @@ def main():
         "Alternatively, use named REGEX groups.",
         nargs="+",
         type=str,
+        required=True,
     )
+    argparser.add_argument(
+        "-n",
+        "--output-basename",
+        help='Adds a column called "basename" to the resulting CSV where it is just '
+        "the base name of the path instead of the entire path. This is not guaranteed "
+        "to be unique.",
+        default=False,
+        required=False,
+        action="store_true",
+    )
     argparser.add_argument(
         "-V",
View File

@@ -72,20 +72,24 @@ def collect_files(


 def write_collected_to_csv(
-    output_path: str, collected: dict[str, dict[str, str]], property_keys: Iterable[str]
+    output_path: str,
+    collected: dict[str, dict[str, str]],
+    property_keys: Iterable[str],
+    output_basename: bool,
 ):
-    with open(output_path, "w") as output_fd:
+    with open(output_path, "w", newline="", encoding="utf-8") as output_fd:
         s_property_keys = sorted(property_keys)
-        header = ["path", *s_property_keys]
+        header = ["path"]
+        if output_basename:
+            header.append("basename")
+        header.extend(s_property_keys)
         writer = csv.writer(output_fd)
         writer.writerow(header)
         for full_path, properties in collected.items():
-            writer.writerow(
-                [
-                    full_path,
-                    *(
-                        properties[k] if k in properties else "N/A"
-                        for k in s_property_keys
-                    ),
-                ]
-            )
+            row = [full_path]
+            if output_basename:
+                row.append(os.path.basename(full_path))
+            row.extend(
+                (properties[k] if k in properties else "N/A" for k in s_property_keys)
+            )
+            writer.writerow(row)

View File

@@ -6,3 +6,4 @@ dependencies:
   - pytest=7.2.2
   - twine=4.0.2
   - python=3.11
+prefix: ./env

3
requirements.txt Normal file
View File

@@ -0,0 +1,3 @@
build
pytest
twine

View File

@@ -1,6 +1,6 @@
 [metadata]
 name = csvbyname
-version = 0.0.3
+version = 0.0.6
 author = Harrison

 [options]