diff --git a/lib/python-beta/.gitignore b/lib/python-beta/.gitignore
deleted file mode 100644
index 98cbc105e..000000000
--- a/lib/python-beta/.gitignore
+++ /dev/null
@@ -1,133 +0,0 @@
-# Byte-compiled / optimized / DLL files
-__pycache__/
-*.py[cod]
-*$py.class
-
-# C extensions
-*.so
-
-# Distribution / packaging
-.Python
-build/
-develop-eggs/
-dist/
-downloads/
-eggs/
-.eggs/
-lib/
-lib64/
-parts/
-sdist/
-var/
-wheels/
-pip-wheel-metadata/
-share/python-wheels/
-*.egg-info/
-.installed.cfg
-*.egg
-MANIFEST
-.pypirc
-
-# PyInstaller
-# Usually these files are written by a python script from a template
-# before PyInstaller builds the exe, so as to inject date/other infos into it.
-*.manifest
-*.spec
-
-# Installer logs
-pip-log.txt
-pip-delete-this-directory.txt
-
-# Unit test / coverage reports
-htmlcov/
-.tox/
-.nox/
-.coverage
-.coverage.*
-.cache
-nosetests.xml
-coverage.xml
-*.cover
-*.py,cover
-.hypothesis/
-.pytest_cache/
-
-# Translations
-*.mo
-*.pot
-
-# Django stuff:
-*.log
-local_settings.py
-db.sqlite3
-db.sqlite3-journal
-
-# Flask stuff:
-instance/
-.webassets-cache
-
-# Scrapy stuff:
-.scrapy
-
-# Sphinx documentation
-docs/_build/
-
-# PyBuilder
-target/
-
-# Jupyter Notebook
-.ipynb_checkpoints
-
-# IPython
-profile_default/
-ipython_config.py
-
-# pyenv
-.python-version
-
-# pipenv
-# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
-# However, in case of collaboration, if having platform-specific dependencies or dependencies
-# having no cross-platform support, pipenv may install dependencies that don't work, or not
-# install all needed dependencies.
-#Pipfile.lock
-
-# PEP 582; used by e.g. github.com/David-OConnor/pyflow
-__pypackages__/
-
-# Celery stuff
-celerybeat-schedule
-celerybeat.pid
-
-# SageMath parsed files
-*.sage.py
-
-# Environments
-.env
-.venv
-env/
-venv/
-ENV/
-env.bak/
-venv.bak/
-
-# Spyder project settings
-.spyderproject
-.spyproject
-
-# Rope project settings
-.ropeproject
-
-# mkdocs documentation
-/site
-
-# mypy
-.mypy_cache/
-.dmypy.json
-dmypy.json
-
-# Pyre type checker
-.pyre/
-
-# VS Code
-.vscode/
diff --git a/lib/python-beta/.pre-commit-config.yaml b/lib/python-beta/.pre-commit-config.yaml
deleted file mode 100644
index d957f567f..000000000
--- a/lib/python-beta/.pre-commit-config.yaml
+++ /dev/null
@@ -1,60 +0,0 @@
-ci:
- autoupdate_commit_msg: 'chore: update pre-commit hooks'
- autofix_commit_msg: 'style: pre-commit fixes'
-
-repos:
- - repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.5.0
- hooks:
- - id: check-added-large-files
- - id: check-case-conflict
- - id: check-merge-conflict
- - id: check-symlinks
- - id: check-yaml
- - id: debug-statements
- - id: end-of-file-fixer
- - id: mixed-line-ending
- - id: requirements-txt-fixer
- - id: trailing-whitespace
-
- - repo: https://github.com/PyCQA/isort
- rev: 5.12.0
- hooks:
- - id: isort
- args: ['-a', 'from __future__ import annotations']
-
- - repo: https://github.com/asottile/pyupgrade
- rev: v3.15.0
- hooks:
- - id: pyupgrade
- args: [--py37-plus]
-
- - repo: https://github.com/hadialqattan/pycln
- rev: v2.4.0
- hooks:
- - id: pycln
- args: [--config=pyproject.toml]
- stages: [manual]
-
- - repo: https://github.com/codespell-project/codespell
- rev: v2.2.6
- hooks:
- - id: codespell
-
- - repo: https://github.com/pre-commit/pygrep-hooks
- rev: v1.10.0
- hooks:
- - id: python-check-blanket-noqa
- - id: python-check-blanket-type-ignore
- - id: python-no-log-warn
- - id: python-no-eval
- - id: python-use-type-annotations
- - id: rst-backticks
- - id: rst-directive-colons
- - id: rst-inline-touching-normal
-
- - repo: https://github.com/psf/black-pre-commit-mirror
- rev: 23.11.0
- hooks:
- - id: black
- args: [--line-length=120]
diff --git a/lib/python-beta/README.md b/lib/python-beta/README.md
deleted file mode 100644
index 0f5235bf1..000000000
--- a/lib/python-beta/README.md
+++ /dev/null
@@ -1,110 +0,0 @@
-# Bailo Python Client
-
-A simple Python API Wrapper for Bailo
-
-
-
-
-
- Table of Contents
-
- -
- Key Features
-
- -
- Installing
-
- -
- Getting Started
-
- -
- Development
-
-
-
-
-
-
-
-## Key Features
-
-- Uploading and downloading model binaries
-
-## Installing
-
-**Python 3.8.1 or higher is required**
-
-```bash
-pip install bailo
-```
-
-## Getting Started
-
-```python
-from bailo import Client, Model
-client = Client("http://localhost:8080")
-
-# Create a model
-yolo = Model.create(
- client=client,
- name="YoloV4",
- description="You only look once!",
- team_id="Uncategorised"
-)
-
-yolo.card_from_schema("minimal-general-v10-beta")
-
-# Create a new release
-my_release = yolo.create_release(version="0.1.0",
- notes="Beta")
-
-# Upload a file to the release
-with open("yolo.onnx") as f:
- my_release.upload("yolo", f)
-```
-
-## Documentation
-
-Documenation is rendered with Sphinx and served [here](https://gchq.github.io/Bailo/docs/python/index.html).
-
-### Building locally
-
-From the docs directory run either `make html` or `make.bat` on Windows. This will build it in the backend directory by
-default.
-
-## Development
-
-### Install and add precommit
-
-If already working on Bailo you may be prompted to overwrite Husky. Follow the instructions given by Git CLI.
-
-```bash
-pip install pre-commit
-pre-commit install
-```
-
-### Install the package locally
-
-```bash
-pip install -e .
-```
-
-### Testing
-
-The package uses Pytest to test packages. Tests can be ran accordingly from within this directory. Tests are split into
-categories sections for automation purposes.
-
-In order to run integration tests make sure Bailo is running on `https://localhost:8080`:
-
-```bash
-pytest -m integration
-```
-
-Run all other tests:
-
-```bash
-pytest
-```
diff --git a/lib/python-beta/docs/Makefile b/lib/python-beta/docs/Makefile
deleted file mode 100644
index f13636a2d..000000000
--- a/lib/python-beta/docs/Makefile
+++ /dev/null
@@ -1,22 +0,0 @@
-# Minimal makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line, and also
-# from the environment for the first two.
-SPHINXOPTS ?=
-SPHINXBUILD ?= sphinx-build
-SOURCEDIR = .
-BUILDDIR = _build
-BACKENDDIR = ../../../backend/python-docs
-
-# Put it first so that "make" without argument is like "make help".
-help:
- @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
-
-.PHONY: help Makefile
-
-# Catch-all target: route all unknown targets to Sphinx using the new
-# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
-%: Makefile
- @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
- cp -R $(BUILDDIR) $(BACKENDDIR)
\ No newline at end of file
diff --git a/lib/python-beta/docs/make.bat b/lib/python-beta/docs/make.bat
deleted file mode 100644
index 0ddad078e..000000000
--- a/lib/python-beta/docs/make.bat
+++ /dev/null
@@ -1,37 +0,0 @@
-@ECHO OFF
-
-pushd %~dp0
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
- set SPHINXBUILD=sphinx-build
-)
-set SOURCEDIR=.
-set BUILDDIR=_build
-set BACKENDDIR=..\..\..\backend\python-docs
-
-%SPHINXBUILD% >NUL 2>NUL
-if errorlevel 9009 (
- echo.
- echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
- echo.installed, then set the SPHINXBUILD environment variable to point
- echo.to the full path of the 'sphinx-build' executable. Alternatively you
- echo.may add the Sphinx directory to PATH.
- echo.
- echo.If you don't have Sphinx installed, grab it from
- echo.https://www.sphinx-doc.org/
- exit /b 1
-)
-
-if "%1" == "" goto help
-
-%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-Xcopy %BUILDDIR% %BACKENDDIR%
-goto end
-
-:help
-%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
-
-:end
-popd
diff --git a/lib/python/.gitignore b/lib/python/.gitignore
index f2de0e3ed..98cbc105e 100644
--- a/lib/python/.gitignore
+++ b/lib/python/.gitignore
@@ -1,6 +1,133 @@
-__pycache__
-bailoclient.egg-info
-settings.json
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+.pypirc
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
.env
-venv
-docs/build
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# VS Code
+.vscode/
diff --git a/lib/python/.pre-commit-config.yaml b/lib/python/.pre-commit-config.yaml
index d3fe233f4..d957f567f 100644
--- a/lib/python/.pre-commit-config.yaml
+++ b/lib/python/.pre-commit-config.yaml
@@ -1,25 +1,60 @@
+ci:
+ autoupdate_commit_msg: 'chore: update pre-commit hooks'
+ autofix_commit_msg: 'style: pre-commit fixes'
+
repos:
- - repo: https://github.com/psf/black
- rev: 22.3.0
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.5.0
hooks:
- - id: black
+ - id: check-added-large-files
+ - id: check-case-conflict
+ - id: check-merge-conflict
+ - id: check-symlinks
+ - id: check-yaml
+ - id: debug-statements
+ - id: end-of-file-fixer
+ - id: mixed-line-ending
+ - id: requirements-txt-fixer
+ - id: trailing-whitespace
+
+ - repo: https://github.com/PyCQA/isort
+ rev: 5.12.0
+ hooks:
+ - id: isort
+ args: ['-a', 'from __future__ import annotations']
+
+ - repo: https://github.com/asottile/pyupgrade
+ rev: v3.15.0
+ hooks:
+ - id: pyupgrade
+ args: [--py37-plus]
- - repo: local
+ - repo: https://github.com/hadialqattan/pycln
+ rev: v2.4.0
hooks:
- - id: pylint
- name: pylint
- entry: pylint
- language: system
- types: [python]
- verbose: true
- args: ['--exit-zero', 'bailoclient']
+ - id: pycln
+ args: [--config=pyproject.toml]
+ stages: [manual]
- - repo: https://github.com/kynan/nbstripout
- rev: 0.4.0
+ - repo: https://github.com/codespell-project/codespell
+ rev: v2.2.6
hooks:
- - id: nbstripout
- name: nbstripout - Strip outputs from notebooks (auto-fixes)
- args:
- - --extra-keys
- - 'metadata.colab metadata.kernelspec cell.metadata.colab cell.metadata.executionInfo cell.metadata.id
- cell.metadata.outputId'
+ - id: codespell
+
+ - repo: https://github.com/pre-commit/pygrep-hooks
+ rev: v1.10.0
+ hooks:
+ - id: python-check-blanket-noqa
+ - id: python-check-blanket-type-ignore
+ - id: python-no-log-warn
+ - id: python-no-eval
+ - id: python-use-type-annotations
+ - id: rst-backticks
+ - id: rst-directive-colons
+ - id: rst-inline-touching-normal
+
+ - repo: https://github.com/psf/black-pre-commit-mirror
+ rev: 23.11.0
+ hooks:
+ - id: black
+ args: [--line-length=120]
diff --git a/lib/python/.pylintrc b/lib/python/.pylintrc
deleted file mode 100644
index f7a6d0ea7..000000000
--- a/lib/python/.pylintrc
+++ /dev/null
@@ -1,615 +0,0 @@
-[MAIN]
-
-# Analyse import fallback blocks. This can be used to support both Python 2 and
-# 3 compatible code, which means that the block might have code that exists
-# only in one or another interpreter, leading to false positives when analysed.
-analyse-fallback-blocks=no
-
-# Load and enable all available extensions. Use --list-extensions to see a list
-# all available extensions.
-#enable-all-extensions=
-
-# In error mode, messages with a category besides ERROR or FATAL are
-# suppressed, and no reports are done by default. Error mode is compatible with
-# disabling specific errors.
-#errors-only=
-
-# Always return a 0 (non-error) status code, even if lint errors are found.
-# This is primarily useful in continuous integration scripts.
-#exit-zero=
-
-# A comma-separated list of package or module names from where C extensions may
-# be loaded. Extensions are loading into the active Python interpreter and may
-# run arbitrary code.
-extension-pkg-allow-list=
-
-# A comma-separated list of package or module names from where C extensions may
-# be loaded. Extensions are loading into the active Python interpreter and may
-# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
-# for backward compatibility.)
-extension-pkg-whitelist=
-
-# Return non-zero exit code if any of these messages/categories are detected,
-# even if score is above --fail-under value. Syntax same as enable. Messages
-# specified are enabled, while categories only check already-enabled messages.
-fail-on=
-
-# Specify a score threshold to be exceeded before program exits with error.
-fail-under=10
-
-# Interpret the stdin as a python script, whose filename needs to be passed as
-# the module_or_package argument.
-#from-stdin=
-
-# Files or directories to be skipped. They should be base names, not paths.
-ignore=CVS
-
-# Add files or directories matching the regex patterns to the ignore-list. The
-# regex matches against paths and can be in Posix or Windows format.
-ignore-paths=
-
-# Files or directories matching the regex patterns are skipped. The regex
-# matches against base names, not paths. The default value ignores Emacs file
-# locks
-ignore-patterns=^\.#
-
-# List of module names for which member attributes should not be checked
-# (useful for modules/projects where namespaces are manipulated during runtime
-# and thus existing member attributes cannot be deduced by static analysis). It
-# supports qualified module names, as well as Unix pattern matching.
-ignored-modules=
-
-# Python code to execute, usually for sys.path manipulation such as
-# pygtk.require().
-#init-hook=
-
-# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
-# number of processors available to use, and will cap the count on Windows to
-# avoid hangs.
-jobs=1
-
-# Control the amount of potential inferred values when inferring a single
-# object. This can help the performance when dealing with large functions or
-# complex, nested conditions.
-limit-inference-results=100
-
-# List of plugins (as comma separated values of python module names) to load,
-# usually to register additional checkers.
-load-plugins=
-
-# Pickle collected data for later comparisons.
-persistent=yes
-
-# Minimum Python version to use for version dependent checks. Will default to
-# the version used to run pylint.
-py-version=3.7
-
-# Discover python modules and packages in the file system subtree.
-recursive=no
-
-# When enabled, pylint would attempt to guess common misconfiguration and emit
-# user-friendly hints instead of false-positive error messages.
-suggestion-mode=yes
-
-# Allow loading of arbitrary C extensions. Extensions are imported into the
-# active Python interpreter and may run arbitrary code.
-unsafe-load-any-extension=no
-
-# In verbose mode, extra non-checker-related info will be displayed.
-#verbose=
-
-
-[REPORTS]
-
-# Python expression which should return a score less than or equal to 10. You
-# have access to the variables 'fatal', 'error', 'warning', 'refactor',
-# 'convention', and 'info' which contain the number of messages in each
-# category, as well as 'statement' which is the total number of statements
-# analyzed. This score is used by the global evaluation report (RP0004).
-evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))
-
-# Template used to display messages. This is a python new-style format string
-# used to format the message information. See doc for all details.
-msg-template=
-
-# Set the output format. Available formats are text, parseable, colorized, json
-# and msvs (visual studio). You can also give a reporter class, e.g.
-# mypackage.mymodule.MyReporterClass.
-#output-format=
-
-# Tells whether to display a full report or only the messages.
-reports=no
-
-# Activate the evaluation score.
-score=yes
-
-
-[MESSAGES CONTROL]
-
-# Only show warnings with the listed confidence levels. Leave empty to show
-# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE,
-# UNDEFINED.
-confidence=HIGH,
- CONTROL_FLOW,
- INFERENCE,
- INFERENCE_FAILURE,
- UNDEFINED
-
-# Disable the message, report, category or checker with the given id(s). You
-# can either give multiple identifiers separated by comma (,) or put this
-# option multiple times (only on the command line, not in the configuration
-# file where it should appear only once). You can also use "--disable=all" to
-# disable everything first and then re-enable specific checks. For example, if
-# you want to run only the similarities checker, you can use "--disable=all
-# --enable=similarities". If you want to run only the classes checker, but have
-# no Warning level messages displayed, use "--disable=all --enable=classes
-# --disable=W".
-disable=raw-checker-failed,
- bad-inline-option,
- locally-disabled,
- file-ignored,
- suppressed-message,
- useless-suppression,
- deprecated-pragma,
- use-symbolic-message-instead,
- too-many-arguments,
- too-many-return-statements,
- too-many-boolean-expressions,
- line-too-long,
- too-few-public-methods,
-
-# Enable the message, report, category or checker with the given id(s). You can
-# either give multiple identifier separated by comma (,) or put this option
-# multiple time (only on the command line, not in the configuration file where
-# it should appear only once). See also the "--disable" option for examples.
-enable=c-extension-no-member
-
-
-[DESIGN]
-
-# List of regular expressions of class ancestor names to ignore when counting
-# public methods (see R0903)
-exclude-too-few-public-methods=
-
-# List of qualified class names to ignore when counting class parents (see
-# R0901)
-ignored-parents=
-
-# Maximum number of arguments for function / method.
-max-args=5
-
-# Maximum number of attributes for a class (see R0902).
-max-attributes=7
-
-# Maximum number of boolean expressions in an if statement (see R0916).
-max-bool-expr=5
-
-# Maximum number of branch for function / method body.
-max-branches=12
-
-# Maximum number of locals for function / method body.
-max-locals=15
-
-# Maximum number of parents for a class (see R0901).
-max-parents=7
-
-# Maximum number of public methods for a class (see R0904).
-max-public-methods=20
-
-# Maximum number of return / yield for function / method body.
-max-returns=6
-
-# Maximum number of statements in function / method body.
-max-statements=50
-
-# Minimum number of public methods for a class (see R0903).
-min-public-methods=2
-
-
-[EXCEPTIONS]
-
-# Exceptions that will emit a warning when caught.
-overgeneral-exceptions=BaseException,
- Exception
-
-
-[FORMAT]
-
-# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
-expected-line-ending-format=
-
-# Regexp for a line that is allowed to be longer than the limit.
-ignore-long-lines=^\s*(# )??$
-
-# Number of spaces of indent required inside a hanging or continued line.
-indent-after-paren=4
-
-# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
-# tab).
-indent-string=' '
-
-# Maximum number of characters on a single line.
-max-line-length=100
-
-# Maximum number of lines in a module.
-max-module-lines=1000
-
-# Allow the body of a class to be on the same line as the declaration if body
-# contains single statement.
-single-line-class-stmt=no
-
-# Allow the body of an if to be on the same line as the test if there is no
-# else.
-single-line-if-stmt=no
-
-
-[IMPORTS]
-
-# List of modules that can be imported at any level, not just the top level
-# one.
-allow-any-import-level=
-
-# Allow wildcard imports from modules that define __all__.
-allow-wildcard-with-all=no
-
-# Deprecated modules which should not be used, separated by a comma.
-deprecated-modules=
-
-# Output a graph (.gv or any supported image format) of external dependencies
-# to the given file (report RP0402 must not be disabled).
-ext-import-graph=
-
-# Output a graph (.gv or any supported image format) of all (i.e. internal and
-# external) dependencies to the given file (report RP0402 must not be
-# disabled).
-import-graph=
-
-# Output a graph (.gv or any supported image format) of internal dependencies
-# to the given file (report RP0402 must not be disabled).
-int-import-graph=
-
-# Force import order to recognize a module as part of the standard
-# compatibility libraries.
-known-standard-library=
-
-# Force import order to recognize a module as part of a third party library.
-known-third-party=enchant
-
-# Couples of modules and preferred modules, separated by a comma.
-preferred-modules=
-
-
-[LOGGING]
-
-# The type of string formatting that logging methods do. `old` means using %
-# formatting, `new` is for `{}` formatting.
-logging-format-style=old
-
-# Logging modules to check that the string format arguments are in logging
-# function parameter format.
-logging-modules=logging
-
-
-[MISCELLANEOUS]
-
-# List of note tags to take in consideration, separated by a comma.
-notes=FIXME,
- XXX,
- TODO
-
-# Regular expression of note tags to take in consideration.
-notes-rgx=
-
-
-[SIMILARITIES]
-
-# Comments are removed from the similarity computation
-ignore-comments=yes
-
-# Docstrings are removed from the similarity computation
-ignore-docstrings=yes
-
-# Imports are removed from the similarity computation
-ignore-imports=yes
-
-# Signatures are removed from the similarity computation
-ignore-signatures=yes
-
-# Minimum lines number of a similarity.
-min-similarity-lines=4
-
-
-[SPELLING]
-
-# Limits count of emitted suggestions for spelling mistakes.
-max-spelling-suggestions=4
-
-# Spelling dictionary name. Available dictionaries: none. To make it work,
-# install the 'python-enchant' package.
-spelling-dict=
-
-# List of comma separated words that should be considered directives if they
-# appear at the beginning of a comment and should not be checked.
-spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
-
-# List of comma separated words that should not be checked.
-spelling-ignore-words=
-
-# A path to a file that contains the private dictionary; one word per line.
-spelling-private-dict-file=
-
-# Tells whether to store unknown words to the private dictionary (see the
-# --spelling-private-dict-file option) instead of raising a message.
-spelling-store-unknown-words=no
-
-
-[STRING]
-
-# This flag controls whether inconsistent-quotes generates a warning when the
-# character used as a quote delimiter is used inconsistently within a module.
-check-quote-consistency=no
-
-# This flag controls whether the implicit-str-concat should generate a warning
-# on implicit string concatenation in sequences defined over several lines.
-check-str-concat-over-line-jumps=no
-
-
-[TYPECHECK]
-
-# List of decorators that produce context managers, such as
-# contextlib.contextmanager. Add to this list to register other decorators that
-# produce valid context managers.
-contextmanager-decorators=contextlib.contextmanager
-
-# List of members which are set dynamically and missed by pylint inference
-# system, and so shouldn't trigger E1101 when accessed. Python regular
-# expressions are accepted.
-generated-members=
-
-# Tells whether to warn about missing members when the owner of the attribute
-# is inferred to be None.
-ignore-none=yes
-
-# This flag controls whether pylint should warn about no-member and similar
-# checks whenever an opaque object is returned when inferring. The inference
-# can return multiple potential results while evaluating a Python object, but
-# some branches might not be evaluated, which results in partial inference. In
-# that case, it might be useful to still emit no-member and other checks for
-# the rest of the inferred objects.
-ignore-on-opaque-inference=yes
-
-# List of symbolic message names to ignore for Mixin members.
-ignored-checks-for-mixins=no-member,
- not-async-context-manager,
- not-context-manager,
- attribute-defined-outside-init
-
-# List of class names for which member attributes should not be checked (useful
-# for classes with dynamically set attributes). This supports the use of
-# qualified names.
-ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace
-
-# Show a hint with possible names when a member name was not found. The aspect
-# of finding the hint is based on edit distance.
-missing-member-hint=yes
-
-# The minimum edit distance a name should have in order to be considered a
-# similar match for a missing member name.
-missing-member-hint-distance=1
-
-# The total number of similar names that should be taken in consideration when
-# showing a hint for a missing member.
-missing-member-max-choices=1
-
-# Regex pattern to define which classes are considered mixins.
-mixin-class-rgx=.*[Mm]ixin
-
-# List of decorators that change the signature of a decorated function.
-signature-mutators=
-
-
-[VARIABLES]
-
-# List of additional names supposed to be defined in builtins. Remember that
-# you should avoid defining new builtins when possible.
-additional-builtins=
-
-# Tells whether unused global variables should be treated as a violation.
-allow-global-unused-variables=yes
-
-# List of names allowed to shadow builtins
-allowed-redefined-builtins=
-
-# List of strings which can identify a callback function by name. A callback
-# name must start or end with one of those strings.
-callbacks=cb_,
- _cb
-
-# A regular expression matching the name of dummy variables (i.e. expected to
-# not be used).
-dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
-
-# Argument names that match this expression will be ignored. Default to name
-# with leading underscore.
-ignored-argument-names=_.*|^ignored_|^unused_
-
-# Tells whether we should check for unused import in __init__ files.
-init-import=no
-
-# List of qualified module names which can have objects that can redefine
-# builtins.
-redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
-
-
-[BASIC]
-
-# Naming style matching correct argument names.
-argument-naming-style=snake_case
-
-# Regular expression matching correct argument names. Overrides argument-
-# naming-style. If left empty, argument names will be checked with the set
-# naming style.
-#argument-rgx=
-
-# Naming style matching correct attribute names.
-attr-naming-style=snake_case
-
-# Regular expression matching correct attribute names. Overrides attr-naming-
-# style. If left empty, attribute names will be checked with the set naming
-# style.
-#attr-rgx=
-
-# Bad variable names which should always be refused, separated by a comma.
-bad-names=foo,
- bar,
- baz,
- toto,
- tutu,
- tata
-
-# Bad variable names regexes, separated by a comma. If names match any regex,
-# they will always be refused
-bad-names-rgxs=
-
-# Naming style matching correct class attribute names.
-class-attribute-naming-style=any
-
-# Regular expression matching correct class attribute names. Overrides class-
-# attribute-naming-style. If left empty, class attribute names will be checked
-# with the set naming style.
-#class-attribute-rgx=
-
-# Naming style matching correct class constant names.
-class-const-naming-style=UPPER_CASE
-
-# Regular expression matching correct class constant names. Overrides class-
-# const-naming-style. If left empty, class constant names will be checked with
-# the set naming style.
-#class-const-rgx=
-
-# Naming style matching correct class names.
-class-naming-style=PascalCase
-
-# Regular expression matching correct class names. Overrides class-naming-
-# style. If left empty, class names will be checked with the set naming style.
-#class-rgx=
-
-# Naming style matching correct constant names.
-const-naming-style=UPPER_CASE
-
-# Regular expression matching correct constant names. Overrides const-naming-
-# style. If left empty, constant names will be checked with the set naming
-# style.
-#const-rgx=
-
-# Minimum line length for functions/classes that require docstrings, shorter
-# ones are exempt.
-docstring-min-length=-1
-
-# Naming style matching correct function names.
-function-naming-style=snake_case
-
-# Regular expression matching correct function names. Overrides function-
-# naming-style. If left empty, function names will be checked with the set
-# naming style.
-#function-rgx=
-
-# Good variable names which should always be accepted, separated by a comma.
-good-names=i,
- j,
- k,
- ex,
- Run,
- _
-
-# Good variable names regexes, separated by a comma. If names match any regex,
-# they will always be accepted
-good-names-rgxs=
-
-# Include a hint for the correct naming format with invalid-name.
-include-naming-hint=no
-
-# Naming style matching correct inline iteration names.
-inlinevar-naming-style=any
-
-# Regular expression matching correct inline iteration names. Overrides
-# inlinevar-naming-style. If left empty, inline iteration names will be checked
-# with the set naming style.
-#inlinevar-rgx=
-
-# Naming style matching correct method names.
-method-naming-style=snake_case
-
-# Regular expression matching correct method names. Overrides method-naming-
-# style. If left empty, method names will be checked with the set naming style.
-#method-rgx=
-
-# Naming style matching correct module names.
-module-naming-style=snake_case
-
-# Regular expression matching correct module names. Overrides module-naming-
-# style. If left empty, module names will be checked with the set naming style.
-#module-rgx=
-
-# Colon-delimited sets of names that determine each other's naming style when
-# the name regexes allow several styles.
-name-group=
-
-# Regular expression which should only match function or class names that do
-# not require a docstring.
-no-docstring-rgx=^_
-
-# List of decorators that produce properties, such as abc.abstractproperty. Add
-# to this list to register other decorators that produce valid properties.
-# These decorators are taken in consideration only for invalid-name.
-property-classes=abc.abstractproperty
-
-# Regular expression matching correct type variable names. If left empty, type
-# variable names will be checked with the set naming style.
-#typevar-rgx=
-
-# Naming style matching correct variable names.
-variable-naming-style=snake_case
-
-# Regular expression matching correct variable names. Overrides variable-
-# naming-style. If left empty, variable names will be checked with the set
-# naming style.
-#variable-rgx=
-
-
-[CLASSES]
-
-# Warn about protected attribute access inside special methods
-check-protected-access-in-special-methods=no
-
-# List of method names used to declare (i.e. assign) instance attributes.
-defining-attr-methods=__init__,
- __new__,
- setUp,
- __post_init__
-
-# List of member names, which should be excluded from the protected access
-# warning.
-exclude-protected=_asdict,
- _fields,
- _replace,
- _source,
- _make
-
-# List of valid names for the first argument in a class method.
-valid-classmethod-first-arg=cls
-
-# List of valid names for the first argument in a metaclass class method.
-valid-metaclass-classmethod-first-arg=cls
-
-
-[REFACTORING]
-
-# Maximum number of nested blocks for function / method body
-max-nested-blocks=5
-
-# Complete name of functions that never returns. When checking for
-# inconsistent-return-statements if a never returning function is called then
-# it will be considered as an explicit return statement and no message will be
-# printed.
-never-returning-functions=sys.exit,argparse.parse_error
diff --git a/lib/python/Makefile b/lib/python/Makefile
deleted file mode 100644
index 0d01a84c1..000000000
--- a/lib/python/Makefile
+++ /dev/null
@@ -1,12 +0,0 @@
-PYTHON := /usr/bin/python3
-
-
-install:
- sudo yum install gcc gmp python3-devel
- $(PYTHON) -m pip install -r requirements.txt
-
-test:
- $(PYTHON) -m pytest --log-cli-level=WARNING -vv --ignore=tests/e2e/
-
-e2e_test:
- $(PYTHON) -m pytest tests/e2e --log-cli-level=WARNING -vv
diff --git a/lib/python/README.md b/lib/python/README.md
index 29ad26e1b..0f5235bf1 100644
--- a/lib/python/README.md
+++ b/lib/python/README.md
@@ -1,226 +1,110 @@
# Bailo Python Client
-## Table of Contents
-
-1. [Installation](#installation)
-2. [Documentation](#documentation)
-3. [Authentication](#authentication)
-4. [Client](#client-usage)
-5. [Development Setup](#development-setup)
-
-## Installation
-
-### Dependencies
-
-This library requires some additional requirements to build
-[pycryptodome](https://pycryptodome.readthedocs.io/en/latest/src/installation.html).
-
-```bash
-# RPM / Fedora Based
-sudo yum install gcc gmp python3-devel
-
-# Ubuntu
-sudo apt-get install build-essential python3-dev
-```
-
-### Client Installation
+A simple Python API Wrapper for Bailo
+
+
+
+
+
+ Table of Contents
+
+ -
+ Key Features
+
+ -
+ Installing
+
+ -
+ Getting Started
+
+ -
+ Development
+
+
+
+
+
+
+
+## Key Features
+
+- Uploading and downloading model binaries
+
+## Installing
+
+**Python 3.8.1 or higher is required**
```bash
-python3 -m pip install .
+pip install bailo
```
-## Documentation
-
-Install dev dependencies and build the documentation
-
-```bash
-python3 -m pip install -r requirements.txt
-cd docs
-make html
-```
-
-To view the docs open `docs/build/html/index.html` in your browser.
-
-## Authentication
-
-Multiple types of authentication are supported, each needing a config object:
-
-### Cognito
+## Getting Started
```python
-from bailoclient import CognitoConfig
-
-config = CognitoConfig(
- username="username",
- password="password",
- user_pool_id="user-pool-id",
- client_id="client-id",
- client_secret="secret",
- region="region",
+from bailo import Client, Model
+client = Client("http://localhost:8080")
+
+# Create a model
+yolo = Model.create(
+ client=client,
+ name="YoloV4",
+ description="You only look once!",
+ team_id="Uncategorised"
)
-```
-
-### PKI
-```python
-from bailoclient import Pkcs12Config
+yolo.card_from_schema("minimal-general-v10-beta")
-config = Pkcs12Config(
- pkcs12_filename="path/to/file.pem",
- pkcs12_password="password"
-)
-```
+# Create a new release
+my_release = yolo.create_release(version="0.1.0",
+ notes="Beta")
-> To avoid exposing the certificate's password use `bailoclient.create_pki_client` to be promted to enter the password
-
-### Null auth
-
-If the Bailo instance is not configured with access control (not recommended), simply use `None`
-
-```python
-config = None
+# Upload a file to the release
+with open("yolo.onnx") as f:
+ my_release.upload("yolo", f)
```
-### Loading Authentication config from environment variables
-
-```python
-from bailoclient import CognitoConfig, Pkcs12Config
-
-config = CognitoConfig.from_env()
-config = Pkcs12Config.from_env()
-```
-
-Please refer to the documentation for environment variables needed by each config type.
-
-## Client Usage
-
-There are two ways to interact with a Bailo instance:
-
-### `bailoclient.Client` Example
-
-This class makes available all the functionality to interact with a bailo instance. There are three client creation
-function available to quickly create a `bailoclient.Client` instance.
-
-```python
-from bailoclient import create_pki_client
-
-client = create_pki_client(
- p12_file="path/p12/file.pem",
- bailo_url="https://bailo.io"
-)
-
-client.get_my_models()
-```
-
-### `bailoclient.Bailo` Example
-
-This class has all the functionality of `bailoclient.client` with additional functionality to improve the user
-experience for data scientists. Additional functionality includes making the model bundlers available and generating
-requirements files from python files.
-
-```python
-from bailoclient import Bailo, BailoConfig, Pkcs12Config
-
-auth = Pkcs12Config(...)
-bailo = Bailo(
- config=BailoConfig(
- auth=auth,
- bailo_url="https://bailo.io",
- ca_cert="path/to/ca",
- )
-)
-
-bailo.get_my_models()
-```
-
-### Loading Bailo config from environment variables:
-
-Please refer to the documentation for environment variables needed by each config type.
-
-```python
-from bailoclient import Bailo, BailoConfig, AuthType
+## Documentation
-config = BailoConfig.from_env(auth_type=AuthType.PKI) # or AuthType.PKI, AuthType.NULL
-bailo = Bailo(config)
+Documentation is rendered with Sphinx and served [here](https://gchq.github.io/Bailo/docs/python/index.html).
-bailo.get_my_models()
-```
+### Building locally
-### Saving and Loading Bailo Config
+From the docs directory run either `make html` or `make.bat` on Windows. This will build it in the backend directory by
+default.
-```python
-from bailoclient import Bailo, BailoConfig, Pkcs12Config
+## Development
-auth = Pkcs12Config(...)
-config=BailoConfig(
- auth=auth,
- bailo_url="https://bailo.io",
- ca_cert="path/to/ca",
-)
-config.save(config_path="./bailo-config.yaml")
-
-bailo = Bailo(config=BailoConfig.load("./bailo-config.yaml"))
-bailo.get_my_models()
-```
-
-### Example Config
-
-Example yaml configuration `config.yaml`
-
-```yaml
-api:
- bailo_url: 'http://example.com'
- ca_verify: 'path/to/ca/cert'
- timeout_period: 5
- aws_gateway: 'True'
- auth:
- username: username
- password: password
- user_pool_id: 'USER_POOL_ID'
- client_id: 'APP_CLIENT_ID'
- client_secret: 'APP_CLIENT_SECRET'
- region: 'AWS_REGION'
-```
+### Install and add precommit
-## Development setup
-
-### Creating an environment with conda
-
-This requires anaconda or miniconda to be installed. Create and activate an environment by:
+If already working on Bailo you may be prompted to overwrite Husky. Follow the instructions given by Git CLI.
```bash
-conda create -n bailo python=3.10
-conda activate bailo
-pip install -r requirments.txt
+pip install pre-commit
+pre-commit install
```
-### Creating an environment with venv
-
-This requires and existing python installation and pip installed. Create and activate an environment by:
+### Install the package locally
```bash
-python3 -m venv venv
-source venv/bin/activate
-pip install -r requirements.txt
+pip install -e .
```
-### Install dependencies
-
-```bash
-python3 -m pip install -r requirements.txt
-pre-commit install
-```
+### Testing
-### Running Tests
+The package uses Pytest for testing. Tests can be run from within this directory. Tests are split into
+categories for automation purposes.
-To run the tests, run the following from the top-level directory of the Bailo Client `Bailo/lib/python`:
+In order to run integration tests make sure Bailo is running on `http://localhost:8080`:
```bash
-make test
+pytest -m integration
```
-To run the end-to-end tests:
+Run all other tests:
```bash
-make e2e-test
+pytest
```
diff --git a/lib/python/bailoclient/__init__.py b/lib/python/bailoclient/__init__.py
deleted file mode 100644
index 68c72ac62..000000000
--- a/lib/python/bailoclient/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-"""Bailo Client"""
-
-import logging
-import sys
-
-from bailoclient.bailo import Bailo
-from bailoclient.client import (
- Client,
- create_cognito_client,
- create_null_client,
- create_pki_client,
-)
-from bailoclient.config import BailoConfig, Pkcs12Config, CognitoConfig
-from bailoclient.enums import AuthType
-
-logging.basicConfig(stream=sys.stdout, level=logging.INFO)
diff --git a/lib/python/bailoclient/bailo.py b/lib/python/bailoclient/bailo.py
deleted file mode 100644
index a32e6407c..000000000
--- a/lib/python/bailoclient/bailo.py
+++ /dev/null
@@ -1,164 +0,0 @@
-"""Bailo client"""
-
-import json
-import os
-from copy import deepcopy
-from typing import Union, List, Optional, Any
-
-from pkg_resources import resource_filename
-
-from bailoclient.client import Client
-from bailoclient.config import BailoConfig
-from bailoclient.model_handlers import Bundler, Loader
-from bailoclient.enums import ModelFlavour
-
-
-class Bailo(Client):
- """
- Bailo class. This class provides some additional functionality to the Client class.
- """
-
- def __init__(self, config: Union[os.PathLike, str, BailoConfig]):
- self.bundler = Bundler()
- self.loader = Loader()
-
- if isinstance(config, (os.PathLike, str)):
- config = BailoConfig.load(config)
-
- elif not isinstance(config, BailoConfig):
- raise ValueError("The provided config is not valid")
-
- super().__init__(config=config)
-
- with open(
- resource_filename("bailoclient", "resources/minimal_metadata.json")
- ) as json_file:
- self._minimal_metadata = json.load(json_file)
-
- with open(
- resource_filename(
- "bailoclient", "resources/minimal_deployment_metadata.json"
- )
- ) as json_file:
- self._minimal_deployment_metadata = json.load(json_file)
-
- @property
- def bundlers(self):
- """Get list of available bundler flavours"""
- return list(self.bundler.bundler_functions.keys())
-
- @property
- def templates(self):
- """Get list of available template flavours"""
- return list(Bundler.model_py_templates.keys())
-
- @property
- def flavours(self):
- """Get list of available model flavours"""
- return [flavour.value for flavour in ModelFlavour.__members__.values()]
-
- @property
- def minimal_metadata(self):
- """Get a copy of Bailo's model minimal metadata schema"""
- return deepcopy(self._minimal_metadata)
-
- @property
- def minimal_deployment_metadata(self):
- """Get a copy of Bailo's deployment minimal metadata schema"""
- return deepcopy(self._minimal_deployment_metadata)
-
- def bundle_model(
- self,
- output_path: str,
- model: Optional[Any] = None,
- model_binary: Optional[str] = None,
- model_py: Optional[str] = None,
- model_requirements: Optional[str] = None,
- requirements_files_path: Optional[str] = None,
- model_flavour: Optional[str] = None,
- additional_files: Optional[List[str]] = None,
- ):
- """Bundle model files into the required structure for the code.zip and binary.zip
- for uploading to BAILO.
-
- To save and bundle a model object, provide the model object and the model_flavour.
- You may need to have MLflow installed to use some of the bundlers.
-
- To bundle a pre-saved model, you will need to provide the model_binary and either the
- model_code or model_flavour as a minimum. If you are not providing model_code, the
- model_flavour is used to get the appropriate model template to bundle with your model.
-
- Args:
- output_path: Path to output code.zip and binary.zip files to
- model: Model object to save via bundler function. To see available bundlers, see bundlers property Defaults to None.
- model_binary: Path to model binary. Can be a file or directory. Defaults to None.
- model_py: Path to model.py file. If not provided, you must provide a model flavour. To see available templates, use templates property. Defaults to None.
- model_requirements: Path to requirements.txt file OR path to a Python file, module or notebook from which to generate the requirements.txt. Defaults to None.
- requirements_files_path:
- model_flavour: Name of the flavour of model. Supported flavours can be seen with the flavours property. Defaults to None.
- additional_files: List of file paths of additional dependencies or directories of dependencies for the model. Defaults to None.
- """
-
- output_path = os.path.abspath(output_path)
-
- if model_binary:
- model_binary = os.path.abspath(model_binary)
-
- if model_py:
- model_py = os.path.abspath(model_py)
-
- if model_requirements:
- model_requirements = os.path.abspath(model_requirements)
-
- if requirements_files_path:
- requirements_files_path = os.path.abspath(requirements_files_path)
-
- if additional_files:
- additional_files = [os.path.abspath(file) for file in additional_files]
-
- self.bundler.bundle_model(
- output_path=output_path,
- model=model,
- model_binary=model_binary,
- model_py=model_py,
- model_requirements=model_requirements,
- requirements_files_path=requirements_files_path,
- model_flavour=model_flavour,
- additional_files=additional_files,
- )
-
- def load_model(self, model_path: str, model_flavour: str):
- """Load a model into memory. You must provide the path to the model file
- and the library that the model was developed with (the model flavour) so
- that the appropriate loader function can be used.
-
- Args:
- model_path: Path to the actual model file (e.g. './model.pth')
- model_flavour: Flavour of the model (e.g. 'torch')
-
- Returns:
- Model: The loaded model
- """
- return self.loader.load_model(model_path, model_flavour)
-
- def generate_requirements_file(self, module_path: str, output_path: str):
- """Generate requirements.txt file based on imports within a Notebook, Python file,
- or Python project. Output_dir must be a directory.
-
- Args:
- module_path: Path to the Python file used to generate requirements.txt
- output_path: Output path in format output/path
- """
-
- module_path = os.path.normpath(module_path)
- output_path = os.path.normpath(output_path)
-
- if not output_path.endswith("requirements.txt"):
- output_path = os.path.join(output_path, "requirements.txt")
-
- output_dir = os.path.dirname(output_path)
-
- if not os.path.exists(output_dir):
- os.makedirs(output_dir, exist_ok=True)
-
- self.bundler.generate_requirements_file(module_path, output_path)
diff --git a/lib/python/bailoclient/client/__init__.py b/lib/python/bailoclient/client/__init__.py
deleted file mode 100644
index e7fd4abdd..000000000
--- a/lib/python/bailoclient/client/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-""" Client for communincating with a Bailo instance """
-
-from bailoclient.client.client import (
- Client,
- create_cognito_client,
- create_pki_client,
- create_null_client,
-)
diff --git a/lib/python/bailoclient/client/auth.py b/lib/python/bailoclient/client/auth.py
deleted file mode 100644
index 926ddbb17..000000000
--- a/lib/python/bailoclient/client/auth.py
+++ /dev/null
@@ -1,166 +0,0 @@
-"""All authenticators"""
-
-import abc
-from typing import Dict, Optional
-
-from pycognito.aws_srp import AWSSRP
-
-from bailoclient.config import AuthenticationConfig, CognitoConfig
-from bailoclient.exceptions import UnauthorizedException
-
-
-class AuthenticationInterface(abc.ABC):
- """Abstract base class for Authentication"""
-
- @abc.abstractmethod
- def __init__(self, config: AuthenticationConfig):
- """Initialise an authentication method from config"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def authenticate_user(self, *args, **kwargs) -> bool:
- """Authenticate the user. Returns False if the authentication fails
-
- Raises:
- NotImplementedError: Abstract method must be implemented
-
- Returns:
- bool: True if authentication is successful
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def is_authenticated(self) -> bool:
- """Returns True if the user is authenticated
-
- Raises:
- NotImplementedError: Abstract method must be implemented
-
- Returns:
- bool: True if the user is authenticated
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def get_authorisation_headers(self) -> Optional[Dict[str, str]]:
- """Authenticate and get the required headers that can be used to send an API request.
- Return None if the authentication fails
- Note that this interface will definitely change once more auth types are explored.
-
- Raises:
- NotImplementedError: Abstract method must be implemented
-
- Returns:
- Optional[Dict[str, str]]: Authorisation headers or None if authentication fails
- """
-
- raise NotImplementedError()
-
-
-class NullAuthenticator(AuthenticationInterface):
- """Dummy class that doesn't actually do any authentication"""
-
- def __init__(self, config: AuthenticationConfig = None):
- """Initialise an authentication method from config"""
-
- def authenticate_user(self, *args, **kwargs) -> bool:
- """Authenticate the user. Returns False if the authentication fails
-
- Returns:
- bool: True if authentication is successful
- """
- return True
-
- def is_authenticated(self) -> bool:
- """ "Returns True if the user is authenticated
-
- Returns:
- bool: True if authenticated
- """
- return True
-
- def get_authorisation_headers(self) -> Optional[Dict[str, str]]:
- """Authenticate and get a secure token than can be used to send an API request.
- Return None if the authentication fails
- Note that this interface will definitely change once more auth types are explored.
-
- Returns:
- Optional[Dict[str, str]]: Authentication headers or None if authentication fails
- """
- return {}
-
-
-class Pkcs12Authenticator(NullAuthenticator):
- """
- Dummy class that doesn't actually do any authentication, but using this class
- allows the API to know to use the P12 certificate in the config to make requests
- """
-
-
-class CognitoSRPAuthenticator(AuthenticationInterface):
- """Authentication implementation for Cognito SRP using username/password"""
-
- def __init__(self, config: CognitoConfig):
- if not isinstance(config, CognitoConfig):
- raise ValueError("Cognito authentication requires a CognitoConfig instance")
-
- self.config = config
- self.authentication_result = None
- self._authenticated = False
-
- def __try_authorise(self) -> Dict[str, str]:
- """
- Call the AWS Cognito API and try to authenticate with username and password.
- Returns the response object
- """
- aws = AWSSRP(
- username=self.config.username,
- password=self.config.password,
- pool_id=self.config.user_pool_id,
- client_id=self.config.client_id,
- client_secret=self.config.client_secret,
- pool_region=self.config.region,
- )
- return aws.authenticate_user()
-
- def authenticate_user(self, *args, **kwargs) -> bool:
- """
- Authenticate the user using AWS Cognito. Returns False if the authentication fails
-
- Returns:
- bool: True if authentication is successful
- """
-
- response = self.__try_authorise(self.config.username, self.config.password)
-
- if (
- "AuthenticationResult" in response
- and "AccessToken" in response["AuthenticationResult"]
- ):
- self.authentication_result = response["AuthenticationResult"]
- self._authenticated = True
- return True
-
- return False
-
- def is_authenticated(self) -> bool:
- """ "Returns True if the user is authenticated
-
- Returns:
- bool: True if authenticated
- """
- return self._authenticated
-
- def get_authorisation_headers(self) -> Optional[Dict[str, str]]:
- """Authenticate and get a secure token than can be used to send an API request.
- Return None if the authentication fails
- Note that this interface will definitely change once more auth types are explored.
-
- Returns:
- Optional[Dict[str, str]]: AWS access token if auth is successful, else None
- """
-
- if not self.authentication_result:
- raise UnauthorizedException("Authenticator not yet authorised.")
-
- return {"Authorization": "Bearer " + self.authentication_result["AccessToken"]}
diff --git a/lib/python/bailoclient/client/client.py b/lib/python/bailoclient/client/client.py
deleted file mode 100644
index 6ce075bcf..000000000
--- a/lib/python/bailoclient/client/client.py
+++ /dev/null
@@ -1,608 +0,0 @@
-"""Client class for connecting and interacting with a Bailo instance"""
-
-import getpass
-import json
-import os
-from datetime import datetime
-from glob import glob
-from typing import Union, Dict, List, Optional
-
-from pkg_resources import resource_filename
-from requests_toolbelt.multipart.encoder import MultipartEncoder
-
-from bailoclient.client.http import RequestsAdapter
-from bailoclient.models import Model, User
-from bailoclient.exceptions import (
- CannotIncrementVersion,
- DeploymentNotFound,
- InvalidFileRequested,
- UserNotFound,
-)
-from bailoclient.client.utils import generate_payload
-from bailoclient.client.validation import (
- too_large_for_gateway,
- deployment_matches,
- validate_uploads,
-)
-from bailoclient.config import CognitoConfig, Pkcs12Config, BailoConfig
-
-
-class Client:
- """Client interface for interacting with API"""
-
- def __init__(self, config: BailoConfig):
- self.api = RequestsAdapter(config)
- self.connection_params = None
- self.config = config
-
- def get_model_schema(self, model_uuid: str) -> Dict:
- """Get schema for a model by its UUID
-
- Args:
- model_uuid: the external UUID of a model.
-
- Returns:
- dict: The schema associated with a given model
- """
- return self.api.get(f"/model/{model_uuid}/schema")
-
- def get_upload_schemas(self) -> List:
- """Get list of available model schemas
-
- Returns:
- list: List of model schemas
- """
- return self.api.get("/schemas?use=UPLOAD")
-
- def get_deployment_schemas(self) -> List:
- """Get list of deployment schemas
-
- Returns:
- List: List of deployment schemas
- """
- return self.api.get("/schemas?use=DEPLOYMENT")
-
- def get_users(self) -> List[User]:
- """Get list of users
-
- Returns:
- List: List of User objects
- """
- return [User(user_data) for user_data in self.api.get("/users")["users"]]
-
- def get_me(self) -> User:
- """Get current user
-
- Returns:
- User: current user
- """
- return User(self.api.get("/user"))
-
- def get_user_by_name(self, name: str) -> User:
- """Get particular user by name
-
- Args:
- name: Name of user
-
- Raises:
- UserNotFound: The user could not be found
-
- Returns:
- User: User with given name
- """
- users = self.get_users()
- for user in users:
- if user.id == name:
- return user
- raise UserNotFound(f"{name}")
-
- def download_model_files(
- self,
- deployment_uuid: str,
- model_version: str,
- file_type: Optional[str] = None,
- output_dir: str = "./model/",
- overwrite: bool = False,
- ):
- """Download the code or binary for a model. file_type can either be 'binary' or 'code'.
-
- Args:
- deployment_uuid: UUID of the deployment
- model_version: Version of the model
- file_type: Model files to download. Either 'code' or 'binary'.
- output_dir: Output directory for file downloads. Defaults to "./model/".
- overwrite: Whether to overwrite an existing folder with download. Defaults to False.
-
- Raises:
- InvalidFileRequested: Invalid file type - must be 'code' or 'binary'
- FileExistsError: File already exists at filepath. Overwrite must be specified to overwrite.
-
- Returns:
- str: Response status code
- """
-
- if file_type and file_type not in ["code", "binary"]:
- raise InvalidFileRequested(
- "Invalid file_type provided - file_type can either be 'code' or 'binary'"
- )
-
- if glob(output_dir) and not overwrite:
- raise FileExistsError(
- "A folder already exists at this location. Use overwrite=True if you want to overwrite the existing folder."
- )
-
- if not file_type:
- code_response = self.api.get(
- f"/deployment/{deployment_uuid}/version/{model_version}/raw/code",
- output_dir=output_dir,
- )
-
- binary_response = self.api.get(
- f"/deployment/{deployment_uuid}/version/{model_version}/raw/binary",
- output_dir=output_dir,
- )
-
- return code_response, binary_response
-
- return self.api.get(
- f"/deployment/{deployment_uuid}/version/{model_version}/raw/{file_type}",
- output_dir=output_dir,
- )
-
- def get_deployment_by_uuid(self, deployment_uuid: str) -> Dict:
- """Get deployment by deployment UUID
-
- Args:
- deployment_uuid: Deployment UUID
-
- Returns:
- dict: Deployment
- """
- return self.api.get(f"/deployment/{deployment_uuid}")
-
- def get_user_deployments(self, user_id: str) -> List[Dict]:
- """Get deployments for a given user
-
- Args:
- user_id: ID of the user
-
- Returns:
- list[dict]: Deployments for user
- """
- return self.api.get(f"/deployment/user/{user_id}")
-
- def get_my_deployments(self) -> List[Dict]:
- """Get deployments for the current user
-
- Returns:
- list[dict]: Deployments for the current user
- """
- return self.get_user_deployments(self.get_me()._id)
-
- def find_my_deployment(
- self,
- deployment_name: str,
- model_uuid: str,
- model_version: str = None,
- ) -> Dict:
- """Find a particular deployment belonging to the current user. If multiple matching deployments are found, return the most recent deployment.
-
- Args:
- deployment_name: Name of the deployment
- model_uuid: UUID of the model associated with the deployment
- model_version: Version of the model that the deployment was created for. Defaults to None.
-
- Returns:
- dict: Matching deployment
- """
-
- user_deployments = self.get_my_deployments()
-
- if not user_deployments:
- raise DeploymentNotFound("You do not currently have any deployments.")
-
- matching_deployments = [
- deployment
- for deployment in user_deployments
- if deployment_matches(
- deployment, deployment_name, model_uuid, model_version
- )
- ]
-
- if not matching_deployments:
- raise DeploymentNotFound(
- "Could not find any deployments for the current user matching the provided criteria."
- )
-
- if len(matching_deployments) == 1:
- return matching_deployments[0]
-
- timestamps = [
- datetime.strptime(
- deployment["metadata"]["timeStamp"], "%Y-%m-%dT%H:%M:%S.%fZ"
- )
- for deployment in matching_deployments
- ]
- latest = timestamps.index(max(timestamps))
-
- return matching_deployments[latest]
-
- def __model(self, model: dict) -> Model:
- """Create Model with schema
-
- Args:
- model: Model data returned from API
-
- Returns:
- Model: Model class object
- """
- return Model(model, _schema=self.get_model_schema(model["uuid"]))
-
- def get_models(
- self,
- filter_str: str = "",
- ) -> List[Model]:
- """Get list of all models. Optional to filter by filter string
-
- Args:
- filter_str: String to filter models. Defaults to "".
-
- Returns:
- List: List of Models
- """
-
- return [
- self.__model(model_metadata)
- for model_metadata in self.api.get(f"/models?type=all&filter={filter_str}")[
- "models"
- ]
- ]
-
- def get_favourite_models(
- self,
- filter_str: str = "",
- ) -> List[Model]:
- """Get list of favourite models. Optional to filter by filter string
-
- Args:
- filter_str: String to filter models. Defaults to "".
-
- Returns:
- List: List of Models
- """
-
- return [
- self.__model(model_metadata)
- for model_metadata in self.api.get(
- f"/models?type=favourites&filter={filter_str}"
- )["models"]
- ]
-
- def get_my_models(
- self,
- filter_str: str = "",
- ) -> List[Model]:
- """Get list of models for the current user. Optional to filter by filter string
-
- Args:
- filter_str: String to filter models. Defaults to "".
-
- Returns:
- List: List of Models
- """
-
- return [
- self.__model(model_metadata)
- for model_metadata in self.api.get(
- f"/models?type=user&filter={filter_str}"
- )["models"]
- ]
-
- def get_model_card(self, model_uuid: str, model_version: str = None) -> Dict:
- """Get a model by its UUID. Optionally retrieve a specific version of a model.
-
- Args:
- model_uuid: Model UUID
- model_version: Model version name/number. Defaults to None.
-
- Returns:
- dict: Requested model
- """
-
- if model_version:
- return self.__model(
- self.api.get(f"model/{model_uuid}/version/{model_version}")
- )
-
- return self.__model(self.api.get(f"model/uuid/{model_uuid}"))
-
- def _get_model_card_by_id(self, model_id: str) -> Dict:
- """Internal method to retrieve model card by its internal ID (e.g. 62d9abb7e5eb14ee63823618)
-
- Args:
- model_id: Internal model ID
-
- Returns:
- dict: Requested model
- """
- return self.__model(self.api.get(f"model/id/{model_id}"))
-
- def get_model_versions(self, model_uuid: str) -> List[Dict]:
- """Get all versions of a model
-
- Args:
- model_uuid: Model UUID
-
- Returns:
- List[dict]: List of versions
- """
- return self.api.get(f"model/{model_uuid}/versions")
-
- def get_model_deployments(self, model_uuid: str) -> List[Dict]:
- """Get all deployments of a model
-
- Args:
- model_uuid: Model UUID
-
- Returns:
- List[dict]: List of deployments of the model
- """
- return self.api.get(f"model/{model_uuid}/deployments")
-
- def upload_model(self, metadata: dict, binary_file: str, code_file: str) -> str:
- """Upload a new model
-
- Args:
- metadata: Required metadata for upload
- binary_file: Path to model binary file
- code_file: Path to model code file
-
- Returns:
- str: UUID of the new model
-
- Raises:
- ValueError: Payload is too large for the AWS gateway (if using)
- """
-
- metadata_json = json.dumps(metadata)
-
- validate_uploads(
- binary_file=binary_file,
- code_file=code_file,
- metadata=metadata,
- minimal_metadata_path=resource_filename(
- "bailoclient", "resources/minimal_metadata.json"
- ),
- )
-
- payload = generate_payload(metadata_json, binary_file, code_file)
-
- if too_large_for_gateway(payload, self.config.aws_gateway):
- raise ValueError(
- "Payload too large; JWT Auth running through AWS Gateway (10M limit)"
- )
-
- return self._post_model(payload)
-
- def update_model(
- self,
- metadata: dict,
- model_uuid: str,
- binary_file: str,
- code_file: str,
- ) -> str:
- """Update an existing model based on its UUID.
-
- Args:
- metadata: Updated model metadata
- model_uuid: UUID of model to update
- binary_file: Path to the model binary file
- code_file: Path to the model code file
-
- Returns:
- str: UUID of the updated model
-
- Raises:
- ValueError: Payload is too large for the AWS gateway (if using)
- """
-
- metadata_json = json.dumps(metadata)
-
- validate_uploads(
- binary_file=binary_file,
- code_file=code_file,
- metadata=metadata,
- minimal_metadata_path=resource_filename(
- "bailoclient", "resources/minimal_metadata.json"
- ),
- )
-
- payload = generate_payload(metadata_json, binary_file, code_file)
-
- if too_large_for_gateway(payload, self.config.aws_gateway):
- raise ValueError(
- "Payload too large; JWT Auth running through AWS Gateway (10M limit)"
- )
-
- return self._post_model(
- model_data=payload, mode="newVersion", model_uuid=model_uuid
- )
-
- def request_deployment(self, metadata: dict):
- """Request a new deployment of a model
-
- Args:
- metadata: Deployment metadata. See deployment.json for minimal metadata required.
- """
- validate_uploads(
- metadata=metadata,
- minimal_metadata_path=resource_filename(
- "bailoclient", "resources/minimal_deployment_metadata.json"
- ),
- )
-
- metadata_json = json.dumps(metadata)
-
- return self.api.post(
- "/deployment",
- request_body=metadata_json,
- headers={"Content-Type": "application/json"},
- )
-
- def _post_model(
- self,
- model_data: MultipartEncoder,
- mode: str = "newModel",
- model_uuid: Optional[str] = None,
- ) -> str:
- """Post a new model or an updated model
-
- Args:
- model_data: encoded payload for uploading
- mode: newModel or newVersion. Defaults to "newModel".
- model_uuid: Model UUID if updating an existing model. Defaults to None.
-
- Raises:
- ValueError: Invalid mode
-
- Returns:
- str: Model UUID
- """
-
- if mode == "newVersion":
- return self.api.post(
- f"/model?mode={mode}&modelUuid={model_uuid}",
- request_body=model_data,
- headers={"Content-Type": model_data.content_type},
- )
-
- if mode == "newModel":
- return self.api.post(
- f"/model",
- request_body=model_data,
- headers={"Content-Type": model_data.content_type},
- )
-
- raise ValueError("Invalid mode - must be either newVersion or newModel")
-
- def _increment_model_version(self, model_uuid: str) -> str:
- """Increment the latest version of a model by 1
-
- Args:
- model_uuid: UUID of the model
-
- Returns:
- str: incremented version number
- """
-
- model_versions = self.api.get(f"model/{model_uuid}/versions")
- try:
- model_versions = [
- int(model_version["version"]) for model_version in model_versions
- ]
- except ValueError as exc:
- raise (
- CannotIncrementVersion(
- "Please manually provide an updated version number"
- )
- ) from exc
-
- latest_version = max(model_versions)
-
- return str(latest_version + 1)
-
-
-def create_cognito_client(
- bailo_url: str,
- username: str,
- password: str,
- user_pool_id: str,
- client_id: str,
- client_secret: str,
- region: str,
- ca_verify: Union[bool, str] = True,
- aws_gateway: bool = True,
-) -> Client:
- """Create an authorised Cognito client
-
- Args:
- bailo_url: URL of the Bailo instance
- username: Cognito username
- password: Cognito password
- user_pool_id: Cognito user pool ID
- client_id: Cognito client ID
- client_secret: Cognito client secret
- region: Cognito region
- ca_verify: Verify SSL certificates. Provide a path to use a custom cert
- aws_gateway: Is Bailo load balanced with an aws gateway
-
- Returns:
- Client: Authorised Bailo Client
- """
-
- cognito_config = CognitoConfig(
- username=username,
- password=password,
- user_pool_id=user_pool_id,
- client_id=client_id,
- client_secret=client_secret,
- region=region,
- )
-
- config = BailoConfig(
- auth=cognito_config,
- bailo_url=bailo_url,
- ca_verify=ca_verify,
- aws_gateway=aws_gateway,
- )
-
- return Client(config)
-
-
-def create_pki_client(
- p12_file: str,
- bailo_url: str,
- ca_verify: Union[str, bool] = True,
- aws_gateway: bool = True,
-) -> Client:
- """Create an authorised PKI client
-
- Args:
- p12_file: Path to P12 file
- ca_verify: Path to CA file
- bailo_url: URL of the Bailo instance
- aws_gateway: Is Bailo load balanced with an aws gateway
-
-
- Returns:
- Client: Authorised Bailo Client
- """
- p12_pwd = getpass.getpass(
- prompt=f"Enter your password for {os.getenv('p12_file')}: "
- )
-
- pki_config = Pkcs12Config(pkcs12_filename=p12_file, pkcs12_password=p12_pwd)
- config = BailoConfig(
- auth=pki_config, bailo_url=url, ca_verify=ca_verify, aws_gateway=aws_gateway
- )
-
- return Client(config)
-
-
-def create_null_client(
- bailo_url: str, ca_verify: Union[str, bool] = True, aws_gateway: bool = True
-):
- """Create an unauthorised client
-
- Args:
- bailo_url: URL of the Bailo instance
- ca_verify: Path to CA file
- aws_gateway: Is Bailo load balanced with an aws gateway
-
- Returns:
- Client: Bailo Client
- """
- config = BailoConfig(
- auth=None, bailo_url=bailo_url, ca_verify=ca_verify, aws_gateway=aws_gateway
- )
- return Client(config)
diff --git a/lib/python/bailoclient/client/http.py b/lib/python/bailoclient/client/http.py
deleted file mode 100644
index 44101d999..000000000
--- a/lib/python/bailoclient/client/http.py
+++ /dev/null
@@ -1,203 +0,0 @@
-"""Adapters for Client to use for communicating over HTTP to a Bailo instance"""
-
-import abc
-from typing import Dict, Optional
-
-import requests
-import requests_pkcs12
-
-from bailoclient.client.auth import (
- Pkcs12Authenticator,
- CognitoSRPAuthenticator,
- NullAuthenticator,
-)
-from bailoclient.client.utils import (
- get_headers,
- handle_response,
- form_url,
- handle_reconnect,
-)
-from bailoclient.config import BailoConfig, Pkcs12Config, CognitoConfig
-
-
-class HttpInterface(abc.ABC):
- """API interface"""
-
- @abc.abstractmethod
- def __init__(self, config: BailoConfig):
- raise NotImplementedError
-
- def _connect(self) -> bool:
- """Authenticate with the BailoAPI. Returns True if successful
-
- Returns:
- bool: authenticated
- """
- raise NotImplementedError
-
- @abc.abstractmethod
- def get(
- self,
- request_path: str,
- request_params: Optional[Dict[str, str]],
- headers: Optional[Dict] = None,
- output_dir: Optional[None] = None,
- ) -> Dict[str, str]:
- """Make a GET request against the API.
- This will not do any validation of parameters prior to sending.
-
- Args:
- request_path: The requested path relative to the API (e.g. /model/summary)
- request_params: Any query parameters to be passed to the API. Defaults to None.
- headers: request headers. Defaults to None.
- output_dir: path to directory to write output
-
- Raises:
- NotImplementedError: Abstract method must be implemented
-
- Returns:
- Dict[str, str]: A JSON object returned by the API.
- Returns an empty dictionary if the request fails.
- """
- raise NotImplementedError
-
- @abc.abstractmethod
- def post(
- self,
- request_path: str,
- request_body: Dict,
- request_params: Optional[Dict[str, str]] = None,
- headers: Optional[Dict] = None,
- ) -> Dict[str, str]:
- """Make a POST request against the API.
- This will not do any validation of parameters prior to sending.
-
- Args:
- request_path: The requested path relative to the API (e.g. /model/summary)
- request_body: The full request body as a dict
- request_params: Any query parameters to be passed to the API. Defaults to None.
- headers: request headers. Defaults to None.
-
- Raises:
- NotImplementedError: Abstract method must be implemented
-
- Returns:
- Dict[str, str]: A JSON object returned by the API.
- Returns an empty dictionary if the request fails.
- """
- raise NotImplementedError
-
- @abc.abstractmethod
- def put(
- self,
- request_path: str,
- request_body: Dict,
- request_params: Optional[Dict[str, str]] = None,
- headers: Optional[Dict] = None,
- ) -> Dict[str, str]:
- """Make a PUT request against the API.
- This will not do any validation of parameters prior to sending.
-
- Args:
- request_path: The requested path relative to the API (e.g. /model/summary)
- request_body: The full request body as a dict
- request_params: Any query parameters to be passed to the API. Defaults to None.
- headers: request headers. Defaults to None.
-
- Raises:
- NotImplementedError: Abstract method must be implemented
-
- Returns:
- Dict[str, str]: A JSON object returned by the API.
- Returns an empty dictionary if the request fails.
- """
- raise NotImplementedError
-
-
-class RequestsAdapter(HttpInterface):
- """HTTP Adapter to communicate to Bailo using requests based libraries"""
-
- def __init__(self, config: BailoConfig):
- self._bailo_url = config.bailo_url
- self._default_params = {
- "timeout": config.timeout_period,
- "verify": config.ca_verify,
- }
-
- if isinstance(config.auth, Pkcs12Config):
- self._auth = Pkcs12Authenticator(config.auth)
- self._requests_module = requests_pkcs12
- self._default_params.update(
- {
- "pkcs12_filename": config.auth.pkcs12_filename,
- "pkcs12_password": config.auth.pkcs12_password,
- }
- )
-
- elif isinstance(config.auth, CognitoConfig):
- self._auth = CognitoSRPAuthenticator(config.auth)
- self._requests_module = requests
-
- elif config.auth is None:
- self._auth = NullAuthenticator(config.auth)
- self._requests_module = requests
-
- else:
- raise ValueError(
- "Could not identify the authentication type from the config"
- )
-
- self._connect()
-
- def _connect(self) -> bool:
- return self._auth.authenticate_user()
-
- @handle_reconnect
- def get(
- self,
- request_path: str,
- request_params: Optional[Dict[str, str]] = None,
- headers: Optional[Dict] = None,
- output_dir: Optional[str] = None,
- ) -> Dict[str, str]:
- response = self._requests_module.get(
- form_url(self._bailo_url, request_path),
- params=request_params,
- headers=get_headers(self._auth, headers),
- **self._default_params,
- )
- return handle_response(response, output_dir)
-
- @handle_reconnect
- def post(
- self,
- request_path: str,
- request_body: Dict,
- request_params: Optional[Dict[str, str]] = None,
- headers: Optional[Dict] = None,
- ) -> Dict[str, str]:
- response = self._requests_module.post(
- form_url(self._bailo_url, request_path),
- data=request_body,
- params=request_params,
- headers=get_headers(self._auth, headers),
- **self._default_params,
- )
- return handle_response(response)
-
- @handle_reconnect
- def put(
- self,
- request_path: str,
- request_body: Dict,
- request_params: Optional[Dict[str, str]] = None,
- headers: Optional[Dict] = None,
- ) -> Dict[str, str]:
- response = self._requests_module.put(
- form_url(self._bailo_url, request_path),
- data=request_body,
- params=request_params,
- headers=get_headers(self._auth, headers),
- **self._default_params,
- )
- return handle_response(response)
diff --git a/lib/python/bailoclient/client/utils.py b/lib/python/bailoclient/client/utils.py
deleted file mode 100644
index 1755662f7..000000000
--- a/lib/python/bailoclient/client/utils.py
+++ /dev/null
@@ -1,199 +0,0 @@
-"""Utility functions for use in the client module"""
-
-import mimetypes
-from functools import wraps
-from json import JSONDecodeError
-from typing import Dict, Optional, Callable, Union, List
-import io
-import logging
-import os
-import shutil
-import zipfile
-
-from requests.models import Response
-from requests_toolbelt import MultipartEncoder
-
-from bailoclient.exceptions import UnauthorizedException, UnconnectedClient
-from bailoclient.client.auth import AuthenticationInterface
-
-logger = logging.getLogger(__name__)
-
-
-def get_headers(
- auth: AuthenticationInterface, input_headers: Optional[Dict] = None
-) -> Dict[str, str]:
- """
- Merge request and auth headers into single dict for making a request
-
- Args:
- auth: Auth instance to generate any needed authheaders
- input_headers: Non auth request headers
-
- Returnds:
- Dict: merged headers
- """
- if input_headers:
- input_headers.update(auth.get_authorisation_headers())
- return input_headers
- return auth.get_authorisation_headers()
-
-
-def form_url(base_url, request_path: str) -> str:
- """Combine the bailo base_url with the path to a resource
-
- Args:
- base_url: url of the bailo instance
- request_path: path of the resource a request is being made to
-
- Returns:
- str: combined resource url
- """
- if request_path.startswith("/"):
- return f"{base_url}{request_path}"
-
- return f"{base_url}/{request_path}"
-
-
-def _decode_file_content(content: bytes, output_dir: str):
- """Decode zipfile bytes from HttpResponse into model files
-
- Args:
- content: Content from the API response
- output_dir: The directory to save the zip file to
- """
- with zipfile.ZipFile(io.BytesIO(content)) as archive:
- archive.extractall(output_dir)
-
- if os.path.exists(f"{output_dir}/__MACOSX"):
- shutil.rmtree(f"{output_dir}/__MACOSX")
-
-
-def get_file_name(path: str):
- """Get the filename from a path
-
- Args:
- path: path to the file to get name of
-
- Returns:
- str: name of the file
- """
- return os.path.basename(path)
-
-
-def get_mime_type(path: str) -> str:
- """Get the mimetype of a file
-
- Args:
- path: path to file to get the mime type of
-
- Returns:
- str: mime type of the file
- """
- return mimetypes.guess_type(path)[0]
-
-
-def generate_payload(
- metadata: dict, binary_file: str, code_file: str
-) -> MultipartEncoder:
- """Generate payload for posting model or deployment
-
- Args:
- metadata: Model metadata
- binary_file: Path to model binary file
- code_file: Path to model code file
-
- Returns:
- MultipartEncoder: Payload of model data
- """
- payloads = [("metadata", metadata)]
- payloads = _add_files_to_payload(payloads, binary_file, code_file)
-
- return MultipartEncoder(payloads)
-
-
-def _add_files_to_payload(payloads: List, binary_file: str, code_file: str) -> List:
- """Add code and binary files to the payload
-
- Args:
- payloads: List of payloads
- binary_file: File path of binary
- code_file: File path of code
- """
- for tag, full_filename in zip(["code", "binary"], [code_file, binary_file]):
- f_name = get_file_name(full_filename)
- mtype = get_mime_type(full_filename)
- with open(full_filename, "rb") as file:
- payloads.append((tag, (f_name, file.read(), mtype)))
-
- return payloads
-
-
-def handle_reconnect(func: Callable) -> Callable:
- """Reconnect the Client
-
- Args:
- func: Client function
-
- Raises:
- UnconnectedClient: Client has not previously been connected
-
- Returns:
- Callable: Function to handle reconnecting
- """
-
- @wraps(func)
- def reconnect(*args, **kwargs):
- self = args[0]
- try:
- return func(*args, **kwargs)
-
- except UnauthorizedException as exc:
- logger.debug("Not currently connected to Bailo")
-
- if self.connection_params:
- logger.debug("Reconnecting")
- self._auth.authenticate_user()
- return func(*args, **kwargs)
-
- logger.error("Client has not previously connected")
- raise UnconnectedClient("Client must call connect to authenticate") from exc
-
- return reconnect
-
-
-def handle_response(
- response: Response, output_dir: str = None
-) -> Optional[Union[str, Dict]]:
- """Handle the response from the server
-
- Args:
- response: Response from the server
- output_dir: Directory to download any files to
-
- Raises:
- UnauthorizedException: Unathorised to access server
-
- Returns:
- Union[str, dict, None]: Response status or message
- """
-
- if 200 <= response.status_code < 300:
- if output_dir:
- _decode_file_content(response.content, output_dir)
- return response.status_code
-
- return response.json()
-
- if response.status_code == 401:
- try:
- data = response.json()
- raise UnauthorizedException(data)
- except JSONDecodeError:
- response.raise_for_status()
-
- try:
- data = response.json()
- return data
-
- except:
- response.raise_for_status()
diff --git a/lib/python/bailoclient/client/validation.py b/lib/python/bailoclient/client/validation.py
deleted file mode 100644
index 139497d1b..000000000
--- a/lib/python/bailoclient/client/validation.py
+++ /dev/null
@@ -1,179 +0,0 @@
-"""Validation functions for use in the client module"""
-
-import json
-import logging
-import os
-from typing import Optional
-
-from requests_toolbelt import MultipartEncoder
-
-from bailoclient.exceptions import DataInvalid, InvalidMetadata, InvalidFilePath
-from bailoclient.models import Model
-
-logger = logging.getLogger(__file__)
-
-
-def minimal_keys_in_dictionary(minimal_dict: dict, test_dict: dict):
- """Check that a dictionary contains all the keys within a minimal dictionary
-
- Args:
- minimal_dict: Minimal dictionary for checking against
- test_dict: Dictionary for checking keys
-
- Returns:
- dict: Result dictionary containing 'valid' and 'error_message' if valid = False
- """
- for key, value in minimal_dict.items():
- try:
- test_dict[key]
- except KeyError:
- return {"valid": False, "error_message": f"must contain '{key}'"}
-
- model_value = test_dict.get(key)
-
- if not model_value and model_value is not False:
- return {"valid": False, "error_message": f"'{key}' cannot be empty"}
-
- if isinstance(value, dict) and not isinstance(model_value, dict):
- return {"valid": False, "error_message": f"missing data under '{key}'"}
-
- if isinstance(value, dict):
- result = minimal_keys_in_dictionary(value, model_value)
-
- if not result["valid"]:
- return result
-
- return {"valid": True}
-
-
-def validate_model_card(model_card: Model):
- """Validate supplied model card
-
- Args:
- model_card: Model
-
- Raises:
- DataInvalid: Model card is not valid
- """
- validation = model_card.validate()
- if not validation.is_valid:
- logger.error("Submitted model card did not validate against model schema")
-
- for err in validation.errors:
- logger.error(err)
- raise DataInvalid(f"Model invalid: {validation.errors}")
-
-
-def validate_metadata(metadata: dict, minimal_metadata_path: str):
- """Validate supplied metadata against a minimal metadata file
-
- Args:
- metadata: Supplied metadata for model or deployment
- minimal_metadata_path: Path to minimal model/deployment metadata for validation
-
- Raises:
- InvalidMetadata: Supplied metadata does not contain all the required parameters
-
- Returns:
- dict: Dictionary of validity and error messages
- """
- with open(minimal_metadata_path, encoding="utf-8") as json_file:
- minimal_metadata = json.load(json_file)
-
- result = minimal_keys_in_dictionary(minimal_metadata, metadata)
-
- if not result["valid"]:
- raise InvalidMetadata(
- f"Metadata {result['error_message']} - refer to minimal_metadata"
- )
-
-
-def validate_file_paths(*args):
- """Validate any filepaths exist and are not directories. Takes any number of string filepaths
-
- Raises:
- InvalidFilePath: File path does not exist
- InvalidFilePath: File path is a directory
- """
- for file_path in args:
- if file_path and not os.path.exists(file_path):
- raise InvalidFilePath(f"{file_path} does not exist")
-
- if file_path and os.path.isdir(file_path):
- raise InvalidFilePath(f"{file_path} is a directory")
-
-
-def too_large_for_gateway(data: MultipartEncoder, aws_gateway: bool) -> bool:
- """If there is an AWS gateway, check that data is not too large
-
- Args:
- data: the data to be uploaded
- aws_gateway: Whether or not the data will be uploaded via AWS gateway.
-
- Returns:
- bool: True if data is too large to be uploaded
- """
- return bool(os.getenv("AWS_GATEWAY", aws_gateway)) and data.len > 10_000_000
-
-
-def deployment_matches(
- deployment: dict,
- deployment_name: str,
- model_uuid: str,
- model_version: str,
-) -> bool:
- """Check whether a deployment matches the provided filters. Returns True if deployment is a match.
-
- Args:
- deployment: The model deployment
- deployment_name: The name of the requested deployment
- model_uuid: The model UUID of the requested deployment
- model_version: The model version of the requested deployment
-
- Returns:
- bool: True if deployment matches provided filters
- """
-
- deployment_details = deployment["metadata"]["highLevelDetails"]
-
- return (
- (deployment_details["name"] == deployment_name)
- and (deployment_details["modelID"] == model_uuid)
- and (
- deployment_details["initialVersionRequested"] == model_version
- or not model_version
- )
- )
-
-
-def validate_uploads(
- model_card: Optional[Model] = None,
- metadata: Optional[dict] = None,
- minimal_metadata_path: Optional[str] = None,
- binary_file: Optional[str] = None,
- code_file: Optional[str] = None,
-):
- """Validate the model and files provided for upload
-
- Args:
- model_card: Model card of the model to update. Defaults to None.
- metadata: Metadata required for uploading a new model. Must
- match the minimal metadata. Defaults to None.
- minimal_metadata_path: something
- binary_file: File path to model binary. Defaults to None.
- code_file: File path to model code. Defaults to None.
-
- Raises:
- DataInvalid: Invalid model
- DataInvalid: Binary or code file does not exist
- InvalidMetadata: Metadata does not meet the minimal metadata
- """
-
- if model_card:
- validate_model_card(model_card)
-
- if metadata:
- validate_metadata(metadata, minimal_metadata_path)
-
- if binary_file and code_file:
- validate_file_paths(binary_file, code_file)
diff --git a/lib/python/bailoclient/config.py b/lib/python/bailoclient/config.py
deleted file mode 100644
index 2fee604f7..000000000
--- a/lib/python/bailoclient/config.py
+++ /dev/null
@@ -1,178 +0,0 @@
-"""Config classes for Client and Auth"""
-
-import json
-import os
-from typing import Optional, Union
-
-import yaml
-from pydantic import BaseSettings
-
-from bailoclient.enums import AuthType
-
-
-class AuthenticationConfig(BaseSettings):
- """Base class for authentication config"""
-
-
-class CognitoConfig(AuthenticationConfig):
- """Configuration for connecting to an AWS Cognito instance."""
-
- username: str
- password: str
- user_pool_id: str
- client_id: str
- client_secret: str
- region: str
-
- @classmethod
- def from_env(cls) -> "CognitoConfig":
- """
- Load Cognito authentication config from environment variables
-
- The following environment variable are supported:
- * COGNITO_USERNAME
- * COGNITO_PASSWORD
- * COGNITO_USERPOOL
- * COGNITO_CLIENT_ID
- * COGNITO_CLIENT_SECRET
- * COGNITO_REGION
-
- Returns:
- CognitoConfig: A cognito authentication configuration object
- """
- return cls(
- username=os.environ["COGNITO_USERNAME"],
- password=os.environ["COGNITO_PASSWORD"],
- user_pool_id=os.environ["COGNITO_USERPOOL"],
- client_id=os.environ["COGNITO_CLIENT_ID"],
- client_secret=os.environ["COGNITO_CLIENT_SECRET"],
- region=os.getenv("COGNITO_REGION"),
- )
-
-
-class Pkcs12Config(AuthenticationConfig):
- """Configuration for connecting using Pkcs12 certificate"""
-
- pkcs12_filename: str
- pkcs12_password: str
-
- @classmethod
- def from_env(cls) -> "Pkcs12Config":
- """Load PKI authentication config from environment variables.
-
- The following environment variable are supported:
- * PKI_CERT_PATH: the path to your PKI certificate
- * PKI_CERT_PASSWORD: the password for your PKI certificate.
-
- If you don't want to expose your password as an environment variable
- please use bailoclient.create_pki_client and you will be prompted for your password.
-
- Returns:
- Pkcs12Config: A cognito authentication configuration object
- """
- return cls(
- pkcs12_filename=os.environ["PKI_CERT_PATH"],
- pkcs12_password=os.environ["PKI_CERT_PASSWORD"],
- )
-
-
-class BailoConfig(BaseSettings):
- """Bailo configuration object"""
-
- auth: Optional[Union[CognitoConfig, Pkcs12Config]] = None
- bailo_url: str
- ca_verify: Union[bool, str] = True
- timeout_period: int = 5 # timeout periods in seconds
- aws_gateway: bool = True # Is Bailo load balanced with an AWS gateway
-
- @classmethod
- def from_env(cls, auth_type: AuthType):
- """
- Load Bailo config from environment variables.
-
- The following environment variable are supported:
- * BAILO_URL: the url of the bailo instance to connect to
- * BAILO_CA_CERT: path to a CA certificate to use for HTTPS connections. Set to "false" to disable TLS verification
- * BAILO_CONNECTION_TIMEOUT: Connection timeout period, defaults to 5
- * BAILO_AWS_GATEWAY: Set to True is Bailo is load-balanced with an AWS gateway.
-
- Refer to your specific authentication config documentations for its supported environment variables.
-
- Args:
- auth_type: The type of authentication needed to authorise to the bailo instance
-
- Returns:
- BailoConfig: A configuration object
- """
-
- if auth_type is AuthType.COGNITO:
- auth = CognitoConfig.from_env()
-
- elif auth_type is AuthType.PKI:
- auth = Pkcs12Config.from_env()
-
- elif auth_type is AuthType.NULL:
- auth = None
-
- else:
- auth = None
-
- return BailoConfig(
- auth=auth,
- bailo_url=os.environ["BAILO_URL"],
- ca_verify=os.getenv("BAILO_CA_CERT") or True,
- aws_gateway=os.getenv("BAILO_AWS_GATEWAY") or True,
- timeout_period=os.getenv("BAILO_CONNECTION_TIMEOUT") or 5,
- )
-
- def save(self, config_path: Union[os.PathLike, str]) -> None:
- """
- Saves a current config as a yaml file.
- Raises an exception if it is unable to save.
-
- Args:
- config_path: Target path to save as a yaml file
-
- Raises:
- IsADirectoryError: Path is a directory not a file
- """
-
- if os.path.isdir(config_path):
- raise IsADirectoryError(
- f"Invalid configuration filepath. {config_path} is a directory"
- )
-
- with open(config_path, "w", encoding="utf-8") as file:
- # Do a round-trip via JSON to take advantage of pydantics encoders
- yaml.dump(json.loads(self.json()), file)
-
- @classmethod
- def load(cls, config_path: Union[os.PathLike, str]) -> "BailoConfig":
- """
- Loads and validates a configuration file.
- Raises an exception if unable to read or load the file.
-
- Args:
- config_path: A path object pointing to a yaml configuration file
-
- Raises:
- FileNotFoundError: _description_
- RuntimeError: _description_
-
- Returns:
- BailoConfig: A configuration object
- """
-
- if not os.path.exists(config_path):
- raise FileNotFoundError(f"Configuration file {config_path} not found")
-
- with open(config_path, "r", encoding="utf-8") as file:
- config_data = yaml.safe_load(file)
-
- try:
- return cls.parse_obj(config_data)
-
- except Exception as exc:
- raise RuntimeError(
- "Configuration file data could not be interpreted as a valid config."
- ) from exc
diff --git a/lib/python/bailoclient/enums.py b/lib/python/bailoclient/enums.py
deleted file mode 100644
index 537b407d7..000000000
--- a/lib/python/bailoclient/enums.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""Enum types for use in bailoclient"""
-
-from enum import Enum, EnumMeta
-
-
-class AuthType(Enum):
- """Enumeration of compatible authentication types"""
-
- COGNITO = "cognito"
- PKI = "pki"
- NULL = "null"
-
-
-class ModelFlavoursMeta(EnumMeta):
- def __contains__(cls, item):
- if not item:
- return False
-
- return isinstance(item, cls) or item in [
- v.value for v in cls.__members__.values()
- ]
-
-
-class ModelFlavour(Enum, metaclass=ModelFlavoursMeta):
- H2O = "h2o"
- KERAS = "keras"
- MLEAP = "mleap"
- PYTORCH = "torch"
- SKLEARN = "sklearn"
- SPARK = "spark"
- TENSORFLOW = "tensorflow"
- ONNX = "onnx"
- GLUON = "gluon"
- XGBOOST = "xgboost"
- LIGHTGBM = "lightgbm"
- CATBOOST = "catboost"
- SPACY = "spacy"
- FASTAI = "fastai"
- STATSMODELS = "statsmodels"
- PROPHET = "prophet"
diff --git a/lib/python/bailoclient/exceptions.py b/lib/python/bailoclient/exceptions.py
deleted file mode 100644
index ea8be6696..000000000
--- a/lib/python/bailoclient/exceptions.py
+++ /dev/null
@@ -1,81 +0,0 @@
-"""Exceptions for bailoclient"""
-
-
-class CannotIncrementVersion(Exception):
- """Unable to automatically increment a model card version"""
-
-
-class DirectoryNotFound(Exception):
- """Unable to find directory"""
-
-
-class DeploymentNotFound(Exception):
- """Could not find a deployment"""
-
-
-class DataInvalid(Exception):
- """Invalid data for creating a model"""
-
-
-class IncompleteDotEnvFile(Exception):
- """Dotenv file doesn't contain all required parameters for client authentication"""
-
-
-class InvalidFilePath(Exception):
- """Filepath does not exist or is otherwise invalid"""
-
-
-class InvalidFileRequested(Exception):
- """Invalid file type requested for download"""
-
-
-class InvalidMetadata(Exception):
- """Metadata does not meet the minimal requirement"""
-
-
-class MissingDotEnvFile(Exception):
- """Unable to find dotenv file containing authentication parameters"""
-
-
-class MissingFilesError(Exception):
- """Some required files required for bundling the ML model are missing"""
-
-
-class ModelFileExportNotAllowed(Exception):
- """Exporting model files not allowed for this model"""
-
-
-class ModelFlavourNotFound(Exception):
- """MLflow model flavour not found"""
-
-
-class ModelMethodNotAvailable(Exception):
- """Model bundler/loader function hasn't been implemented for the model type"""
-
-
-class ModelSchemaMissing(Exception):
- """No schema for a model"""
-
-
-class ModelTemplateNotAvailable(Exception):
- """No model.py template code available"""
-
-
-class NoServerResponseMessage(Exception):
- """The server did not send a response message"""
-
-
-class UnableToCreateBailoClient(Exception):
- """Unable to create BAILO client based on user input"""
-
-
-class UnauthorizedException(Exception):
- """User not authorised"""
-
-
-class UnconnectedClient(Exception):
- """Client has not yet been connected"""
-
-
-class UserNotFound(Exception):
- """The requested user was not found in the Bailo instance"""
diff --git a/lib/python/bailoclient/model_handlers/__init__.py b/lib/python/bailoclient/model_handlers/__init__.py
deleted file mode 100644
index 56d4e1975..000000000
--- a/lib/python/bailoclient/model_handlers/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from .registry import bundler, loader, template
-from .model_bundler import Bundler
-from .model_loader import Loader
diff --git a/lib/python/bailoclient/model_handlers/model_bundler.py b/lib/python/bailoclient/model_handlers/model_bundler.py
deleted file mode 100644
index 0b4360ea7..000000000
--- a/lib/python/bailoclient/model_handlers/model_bundler.py
+++ /dev/null
@@ -1,527 +0,0 @@
-import tempfile
-import subprocess
-import os
-from pathlib import Path
-from zipfile import ZipFile
-from typing import List, Any, Optional
-from pkg_resources import resource_filename
-from shutil import copyfile
-from distutils.dir_util import copy_tree
-
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.exceptions import (
- ModelFlavourNotFound,
- ModelTemplateNotAvailable,
- MissingFilesError,
- ModelMethodNotAvailable,
-)
-
-
-class Bundler:
- """Class for handling model bundling"""
-
- bundler_functions = {}
- model_py_templates = {}
-
- def bundle_model(
- self,
- output_path: str,
- model: Any = None,
- model_binary: Optional[str] = None,
- model_py: Optional[str] = None,
- model_requirements: Optional[str] = None,
- requirements_files_path: Optional[str] = None,
- model_flavour: Optional[str] = None,
- additional_files: Optional[List[str]] = None,
- ):
- """Bundle model files into the required structure for the code.zip and binary.zip
- for uploading to BAILO.
-
- Calls model bundler if model parameter provided to save the model.
-
- Files are zipped into the expected formats.
-
- Args:
- output_path: Output path for code and binary zips
- model: Model object to save. Must be a bundler available.
- model_binary : Path to model binary. Can be a file or directory. Defaults to None.
- model_py: Path to model.py file. Must have model_flavour if not provided. Defaults to None.
- model_requirements: Path to requirements.txt file OR path to a Python file,
- module or notebook from which to generate the
- requirements.txt. Defaults to None.
- requirements_files_path: File path to file/folder of files from
- which to generate requirements file.
- Defaults to None.
- model_flavour: Name of the flavour of model. Defaults to None.
- additional_files: List or tuple of file paths of additional dependencies
- or directories of dependencies for the model.
- Defaults to None.
- """
-
- if not os.path.exists(output_path):
- Path(output_path).mkdir(parents=True)
-
- if additional_files and not isinstance(additional_files, (tuple, list)):
- raise TypeError("Expected additional_files to be a list of file paths")
-
- if model_flavour:
- model_flavour = model_flavour.lower()
-
- if model:
- self._save_and_bundle_model_files(
- output_path=output_path,
- model=model,
- model_py=model_py,
- model_requirements=model_requirements,
- requirements_files_path=requirements_files_path,
- model_flavour=model_flavour,
- additional_files=additional_files,
- )
-
- else:
- self._bundle_model_files(
- output_path=output_path,
- model_binary=model_binary,
- model_py=model_py,
- model_requirements=model_requirements,
- requirements_files_path=requirements_files_path,
- model_flavour=model_flavour,
- additional_files=additional_files,
- )
-
- def _bundle_model_files(
- self,
- output_path: str,
- model_binary: str,
- model_flavour: str,
- model_py: Optional[str] = None,
- model_requirements: Optional[str] = None,
- requirements_files_path: Optional[str] = None,
- additional_files: Optional[List[str]] = None,
- ):
- """Bundle model files into the appropriate file structure where the model binary
- has been provided by the user
-
- Args:
- output_path: Output path to save model files to
- model_binary: Path to the model binary file
- model_flavour: Model flavour.
- model_py: Path to model.py file. Will use the template for the model flavour if not provided. Defaults to None.
- model_requirements: Model requirements.txt file. Defaults to None.
- requirements_files_path: File path to file/folder of files from which to generate requirements file. Defaults to None.
- additional_files: List of additional files to include as dependencies. Defaults to None.
-
- Raises:
- MissingFilesError: Missing model binary files
- MissingFilesError: Missing requirements or requirements files path
- ModelFlavourNotFound: Model template has not been provided and the model flavour is not valid
- """
- if not model_binary:
- raise MissingFilesError(
- "Must provide model binary or model object and flavour"
- )
-
- if not model_requirements and not requirements_files_path:
- raise MissingFilesError(
- """Provide either model_requirements (requirements.txt file) or requirements_files_path (your python file/notebook/module)
- from which to generate requirements.txt"""
- )
-
- if not model_py and model_flavour not in ModelFlavour:
- raise ModelFlavourNotFound(
- "A valid model flavour must be provided to generate the model.py file"
- )
-
- if not model_py:
- model_py = self._get_model_template(model_flavour)
-
- self._transfer_and_bundle_model_files(
- output_path=output_path,
- model_binary=model_binary,
- model_py=model_py,
- additional_files=additional_files,
- model_requirements=model_requirements,
- requirements_files_path=requirements_files_path,
- )
-
- def _save_and_bundle_model_files(
- self,
- output_path: str,
- model: Any,
- model_py: Optional[str] = None,
- model_flavour: Optional[str] = None,
- model_requirements: Optional[str] = None,
- requirements_files_path: Optional[str] = None,
- additional_files: List[Optional[str]] = None,
- ):
- """Bundle model files via bundler.
-
- Args:
- output_path: Output path to save model files to
- model: Model object
- model_py: Path to model.py file. Will use the template for the model flavour if not provided. Defaults to None.
- model_flavour: Model flavour. Defaults to None.
- model_requirements: Model requirements.txt file. Generated based on requirements_files_path if model_requirements not provided. Defaults to None.
- requirements_files_path: File path to file/folder of files from which to generate requirements file. Defaults to None.
- additional_files: List of additional files to include as dependencies. Defaults to None.
-
- Raises:
- ModelFlavourNotRecognised: The provided model flavour was not recognised
- """
-
- if model_flavour not in ModelFlavour:
- raise ModelFlavourNotFound("Invalid model flavour")
-
- if not model_py:
- model_py = self._get_model_template(model_flavour)
-
- tmpdir = tempfile.TemporaryDirectory()
-
- model_binary, optional_files = self._bundle_model(
- output_path=tmpdir.name,
- model=model,
- model_flavour=model_flavour,
- additional_files=additional_files,
- )
-
- self._transfer_and_bundle_model_files(
- output_path=output_path,
- model_binary=model_binary,
- additional_files=additional_files,
- model_requirements=model_requirements,
- requirements_files_path=requirements_files_path,
- model_py=model_py,
- optional_files=optional_files,
- )
- tmpdir.cleanup()
-
- def _get_model_template(self, model_flavour: str):
- """Get the model.py template file by model flavour
-
- Args:
- model_flavour: Model flavour
-
- Raises:
- ModelTemplateNotAvailable: No model template available for model_flavour
-
- Returns:
- str: file path to model.py template file for the model flavour
- """
- try:
- return self.model_py_templates[model_flavour]
-
- except KeyError:
- raise ModelTemplateNotAvailable(
- f"There is no model template available for {model_flavour}"
- )
-
- def _bundle_model(
- self,
- output_path: str,
- model: Any,
- model_flavour: str,
- additional_files: Optional[List[str]] = None,
- ):
- """Save model via model bundler
-
- Args:
- output_path: Path to save the model to (should be a temp location)
- model: The model object to save
- model_flavour: Model flavour to identify corresponding bundler
- additional_files: Additional files required with the model. Defaults to None.
-
- Raises:
- ModelMethodNotAvailable: There is no bundler function associated with the given
- model flavour
-
- Returns:
- Tuple(str, List[str]): Saved model filepath, list of any additional filepaths
- """
- try:
- bundler_function = self.bundler_functions[model_flavour]
-
- except KeyError:
- raise ModelMethodNotAvailable(
- f"Bundler function does not exist for {model_flavour}"
- ) from None
-
- model_code, optional_files = bundler_function(
- model=model,
- output_path=output_path,
- code_paths=additional_files,
- )
-
- return os.path.normpath(model_code), [
- os.path.normpath(file) for file in optional_files
- ]
-
- def _transfer_and_bundle_model_files(
- self,
- output_path: str,
- model_binary: str,
- model_py: str,
- model_requirements: str,
- requirements_files_path: Optional[str] = None,
- additional_files: Optional[List[str]] = None,
- optional_files: Optional[List[str]] = None,
- ):
- """Create code.zip and binary.zip of provoded model files at output path.
- Copies all files to a tempdir in the format expected by BAILO.
-
- Args:
- output_path: Path to create the code.zip and binary.zip files
- model_binary: Path of model binary
- model_py: Path to model.py file
- model_requirements: Path of requirements.txt file
- requirements_files_path: File path to file/folder of files from which to generate requirements file. Defaults to None.
- additional_files: List of paths of any additional required files
- optional_files: List of optional files which have been output from automatic model bundling (e.g. MLflow file, conda requirements)
- """
-
- with tempfile.TemporaryDirectory() as tmpdir_name:
- code_path = os.path.join(tmpdir_name, "model", "code")
- Path(code_path).mkdir(parents=True)
-
- self._copy_model_py(model_py, code_path)
-
- self._copy_or_generate_requirements(
- model_requirements, requirements_files_path, model_py, code_path
- )
-
- if optional_files:
- self._copy_optional_files(optional_files, code_path)
-
- if additional_files:
- self._copy_additional_files(
- additional_files, model_binary, tempfile.gettempdir(), code_path
- )
-
- self._copy_base_model(os.path.join(code_path, "basemodel"))
-
- # create zips
- self.zip_files(model_binary, os.path.join(output_path, "binary.zip"))
- self.zip_files(code_path, os.path.join(output_path, "code.zip"))
-
- def _copy_model_py(self, model_code: str, code_path: str):
- """Copy model.py file over to the code folder
-
- Args:
- model_code: Path to model.py code file
- code_path: Path to code folder
- """
- copyfile(model_code, os.path.join(code_path, "model.py"))
-
- def _copy_base_model(self, base_model_output_path: str):
- """Copy the base model files (abstract base model class and __init__.py to
- basemodel folder in code directory)
-
- Args:
- base_model_output_path: Path to move the basemodel folder to
- """
- Path(base_model_output_path).mkdir()
-
- copy_tree(
- resource_filename(
- "bailoclient",
- "resources/templates/basemodel",
- ),
- base_model_output_path,
- )
-
- def _copy_or_generate_requirements(
- self,
- model_requirements: str,
- requirements_files_path: str,
- model_code: str,
- output_path: str,
- ):
- """If model_requirements is provided, copy the file to the output code_path.
- Otherwise, if requirements_files_path is given, generate the requirements from
- this file. If no paths are provided, use the model.py file to generate requirements
-
- Args:
- model_requirements: Path to requirements.txt
- requirements_files_path: Path to files from which to generate requirements.txt
- model_code: Path to model.py file
- output_path: Output path for model files
- """
- if model_requirements:
- copyfile(model_requirements, os.path.join(output_path, "requirements.txt"))
-
- elif requirements_files_path:
- self.generate_requirements_file(
- requirements_files_path, os.path.join(output_path, "requirements.txt")
- )
-
- else:
- self.generate_requirements_file(
- model_code, os.path.join(output_path, "requirements.txt")
- )
-
- def _copy_optional_files(self, optional_files: List[str], output_path: str):
- """Copy optional files from bundler module output. These filepaths are
- expected to be in the format tmp/tmpxyz/actual/path as they will be created
- into a temp folder when the bundler is run. This is used to get the
- assumed relative path in the new directory.
-
- Args:
- optional_files: List of optional files to copy
- output_path: Output path for model files
- """
- for file_path in optional_files:
- # remove /tmp/tmpxyz/ from path
- relative_filepath = Path(*Path(file_path).parts[3:])
-
- os.makedirs(
- os.path.dirname(os.path.join(output_path, relative_filepath)),
- exist_ok=True,
- )
-
- copyfile(file_path, os.path.join(output_path, relative_filepath))
-
- def _copy_additional_files(
- self,
- additional_files: List[str],
- model_binary: str,
- temp_dir: str,
- output_path: str,
- ):
- """Copy additional files based on their location. Finds commonpath between
- tmpdir and model binary to establish whether additional files are local
- or in a temp directory
-
- Args:
- additional_files: List of additional file paths
- model_binary: Model binary file path
- temp_dir: Temp directory file path
- output_path: Output path for model code files
- """
- if os.path.commonpath([model_binary, temp_dir]) == temp_dir:
- self.__copy_additional_files_from_tempdir(
- additional_files, os.path.join(output_path, "additional_files")
- )
-
- else:
- self.__copy_additional_files_from_local(
- additional_files,
- output_path,
- os.path.dirname(os.path.normpath(model_binary)),
- )
-
- def __copy_additional_files_from_tempdir(
- self, additional_files: List[str], output_path: str
- ):
- """Copy additional files from temp directory to output path. Creates output directory
- for additional files in the output path.
-
- Args:
- additional_files: List of additional file paths
- output_path: Output path for model files
- """
- Path(output_path).mkdir(parents=True)
-
- for file_path in additional_files:
- copyfile(file_path, os.path.join(output_path, os.path.basename(file_path)))
-
- def __copy_additional_files_from_local(
- self, additional_files: List[str], output_path: str, model_parent_path: str
- ):
- """Copy additional files from the local file system to the output path. Creates
- output directories in the output location to match the file structure of the additional
- files relative to the parent directory
-
- Args:
- additional_files: List of additional file paths
- output_path: Output path for model files
- model_parent_path: Parent path of the additional files to be stripped from the additional files path when copied
- """
-
- ## TODO update to copy tree
- for file_path in additional_files:
- output_file_path = os.path.join(
- output_path, os.path.relpath(file_path, model_parent_path)
- )
- os.makedirs(os.path.dirname(output_file_path), exist_ok=True)
-
- copyfile(file_path, output_file_path)
-
- def zip_files(self, file_path: str, zip_path: str):
- """Create zip file at the specified zip path from a file or folder path
-
- Args:
- file_path: The file or folder to zip
- zip_path: Path to create the new zip at
- """
- if os.path.isdir(file_path):
- self.__zip_directory(file_path, zip_path)
-
- else:
- self.__zip_file(file_path, zip_path)
-
- def __zip_file(self, file_path: str, zip_path: str):
- """Zip a single file into new zip created at zip_path
-
- Args:
- file_path: Path to file to zip
- zip_path: Output path for zip
- """
- file_name = os.path.basename(file_path)
-
- with ZipFile(zip_path, "w") as zf:
- zf.write(file_path, arcname=file_name)
-
- def __zip_directory(self, dir_path: str, zip_path: str):
- """Zip a directory of files into new zip created at the zip_path
-
- Args:
- dir_path: Path to code or binary folder
- zip_path: Output path for zip
- """
- with ZipFile(zip_path, "w") as zf:
- for sub_file_path, _, files in os.walk(dir_path):
- output_dir = self.__get_output_dir(dir_path, sub_file_path)
-
- for file in files:
- zf.write(
- filename=os.path.join(sub_file_path, file),
- arcname=os.path.join(output_dir, file),
- )
-
- def __get_output_dir(self, dir_path: str, sub_dir_path: str):
- """Remove top level folder to get the output dir required for the zip files
-
- Args:
- dir_path: Path to code or binary directory
- sub_dir_path: Directory path within dir_path (i.e. within code or binary folder)
-
- Returns:
- str: Output directory with top level folder removed
- """
- if dir_path == sub_dir_path:
- return ""
-
- return os.path.join(Path(sub_dir_path).relative_to(dir_path)) + os.path.sep
-
- def generate_requirements_file(self, module_path: str, output_path: str):
- """Generate requirements.txt file based on imports within a Notebook,
- Python file, or Python project
-
- Args:
- module_path: Path to the Python file used to generate requirements.txt
- output_path: Output path in format output/path/requirements.txt
-
- Raises:
- Exception: Unable to create requirements.txt from specified file at specified location
- """
- try:
- subprocess.run(
- ["pipreqsnb", module_path, "--savepath", output_path],
- stdout=subprocess.DEVNULL,
- stderr=subprocess.STDOUT,
- )
-
- except subprocess.CalledProcessError:
- raise subprocess.CalledProcessError(
- "Unable to create requirements file at the specified location"
- ) from None
diff --git a/lib/python/bailoclient/model_handlers/model_functions/__init__.py b/lib/python/bailoclient/model_handlers/model_functions/__init__.py
deleted file mode 100644
index 6fc167415..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from .catboost import catboost_bundler, catboost_loader, catboost_template
-from .fastai import fastai_bundler, fastai_loader, fastai_template
-from .gluon import gluon_bundler, gluon_loader, gluon_template
-from .h2o import h2o_bundler, h2o_loader, h2o_template
-from .keras import keras_bundler, keras_loader, keras_template
-from .lightgbm import lightgbm_bundler, lightgbm_loader, lightgbm_template
-from .onnx import onnx_bundler, onnx_loader, onnx_template
-from .prophet import prophet_bundler, prophet_loader, prophet_template
-from .pytorch import pytorch_bundler, pytorch_loader, pytorch_template
-from .sklearn import sklearn_bundler, sklearn_loader, sklearn_template
-from .spacy import spacy_bundler, spacy_loader, spacy_template
-from .spark import spark_bundler, spark_loader, spark_template
-from .statsmodels import statsmodels_bundler, statsmodels_loader, statsmodels_template
-from .tensorflow import tensorflow_bundler, tensorflow_loader, tensorflow_template
-from .xgboost import xgboost_bundler, xgboost_loader, xgboost_template
diff --git a/lib/python/bailoclient/model_handlers/model_functions/catboost.py b/lib/python/bailoclient/model_handlers/model_functions/catboost.py
deleted file mode 100644
index 890233b11..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/catboost.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import os
-from typing import List
-from pkg_resources import resource_filename
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.exceptions import ModelMethodNotAvailable
-from bailoclient.model_handlers import bundler, loader, template
-
-
-@bundler(flavour=ModelFlavour.CATBOOST)
-def catboost_bundler(model, output_path: str, code_paths: List[str]):
- """Bundle a CatBoost model with MLflow
-
- Args:
- model (catboost model): The CatBoost model
- output_path: Path to export the model to
- code_paths: List of additional code paths
-
- Returns:
- Tuple(str, List[str]): Path to saved model binary, paths to additional MLflow files to
- bundle with the model files.
- """
- from mlflow.catboost import save_model
-
- save_model(model, path=output_path, code_paths=code_paths)
-
- model_binary = os.path.join(output_path, "data", "model.cb")
- mlflow_files = [
- os.path.join(output_path, "data", "pickle_module_info.txt"),
- os.path.join(output_path, "MLmodel"),
- ]
-
- return model_binary, mlflow_files
-
-
-@loader(flavour=ModelFlavour.CATBOOST)
-def catboost_loader(model_path: str):
- raise ModelMethodNotAvailable(
- "The model loader function has not yet been implemented for CatBoost models"
- )
-
-
-@template(flavour=ModelFlavour.CATBOOST)
-def catboost_template():
- """Get the CatBoost model.py template
-
- Returns:
- str: Path to CatBoost model.py template
- """
- return resource_filename("bailoclient", "resources/templates/catboost.py")
diff --git a/lib/python/bailoclient/model_handlers/model_functions/fastai.py b/lib/python/bailoclient/model_handlers/model_functions/fastai.py
deleted file mode 100644
index 95bf13c9c..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/fastai.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import os
-from typing import List
-from pkg_resources import resource_filename
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.exceptions import ModelMethodNotAvailable
-from bailoclient.model_handlers import bundler, loader, template
-
-
-@bundler(flavour=ModelFlavour.FASTAI)
-def fastai_bundler(model, output_path: str, code_paths: List[str]):
- """Bundle a fast.ai model with MLflow
-
- Args:
- model (fast.ai model): The fast.ai model
- output_path: Path to export the model to
- code_paths: List of additional code paths
-
- Returns:
- Tuple(str, List[str]): Path to saved model binary, paths to additional MLflow files to
- bundle with the model files.
- """
- from mlflow.fastai import save_model
-
- save_model(model, path=output_path, code_paths=code_paths)
-
- model_binary = os.path.join(output_path, "data", "model.fastai")
- mlflow_files = [
- os.path.join(output_path, "data", "pickle_module_info.txt"),
- os.path.join(output_path, "MLmodel"),
- ]
-
- return model_binary, mlflow_files
-
-
-@loader(flavour=ModelFlavour.FASTAI)
-def fastai_loader(model_path: str):
- raise ModelMethodNotAvailable(
- "The model loader function has not yet been implemented for fast.ai models"
- )
-
-
-@template(flavour=ModelFlavour.FASTAI)
-def fastai_template():
- """Get the fast.ai model.py template
-
- Returns:
- str: Path to fast.ai model.py template
- """
- return resource_filename("bailoclient", "resources/templates/fastai.py")
diff --git a/lib/python/bailoclient/model_handlers/model_functions/gluon.py b/lib/python/bailoclient/model_handlers/model_functions/gluon.py
deleted file mode 100644
index 4393149bd..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/gluon.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import os
-from typing import List
-from pkg_resources import resource_filename
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.exceptions import ModelMethodNotAvailable
-from bailoclient.model_handlers import bundler, loader, template
-
-
-@bundler(flavour=ModelFlavour.GLUON)
-def gluon_bundler(model, output_path: str, code_paths: List[str]):
- """Bundle a Gluon model with MLflow
-
- Args:
- model (Gluon model): The Gluon model
- output_path: Path to export the model to
- code_paths: List of additional code paths
-
- Returns:
- Tuple(str, List[str]): Path to saved model binary, paths to additional MLflow files to
- bundle with the model files.
- """
- from mlflow.gluon import save_model
-
- save_model(model, path=output_path, code_paths=code_paths)
-
- model_binary = os.path.join(output_path, "data", "data.net")
- mlflow_files = [
- os.path.join(output_path, "data", "pickle_module_info.txt"),
- os.path.join(output_path, "MLmodel"),
- ]
-
- return model_binary, mlflow_files
-
-
-@loader(flavour=ModelFlavour.GLUON)
-def gluon_loader(model_path: str):
- raise ModelMethodNotAvailable(
- "The model loader function has not yet been implemented for Gluon models"
- )
-
-
-@template(flavour=ModelFlavour.GLUON)
-def gluon_template():
- """Get the Gluon model.py template
-
- Returns:
- str: Path to Gluon model.py template
- """
- return resource_filename("bailoclient", "resources/templates/gluon.py")
diff --git a/lib/python/bailoclient/model_handlers/model_functions/h2o.py b/lib/python/bailoclient/model_handlers/model_functions/h2o.py
deleted file mode 100644
index 46184e7e3..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/h2o.py
+++ /dev/null
@@ -1,50 +0,0 @@
-import os
-from typing import List
-from pkg_resources import resource_filename
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.exceptions import ModelMethodNotAvailable
-from bailoclient.model_handlers import bundler, loader, template
-
-
-@bundler(flavour=ModelFlavour.H2O)
-def h2o_bundler(model, output_path: str, code_paths: List[str]):
- """Bundle an H2O model with MLflow
-
- Args:
- model (H2O model): The H2O model
- output_path: Path to export the model to
- code_paths: List of additional code paths
-
- Returns:
- Tuple(str, List[str]): Path to saved model binary, paths to additional MLflow files to
- bundle with the model files.
- """
- from mlflow.h2o import save_model
-
- save_model(model, path=output_path, code_paths=code_paths)
-
- model_binary = os.path.join(output_path, "data", "model.h2o")
- mlflow_files = [
- os.path.join(output_path, "data", "pickle_module_info.txt"),
- os.path.join(output_path, "MLmodel"),
- ]
-
- return model_binary, mlflow_files
-
-
-@loader(flavour=ModelFlavour.H2O)
-def h2o_loader(model_path: str):
- raise ModelMethodNotAvailable(
- "The model loader function has not yet been implemented for H2O models"
- )
-
-
-@template(flavour=ModelFlavour.H2O)
-def h2o_template():
- """Get the H2O model.py template
-
- Returns:
- str: Path to H2O model.py template
- """
- return resource_filename("bailoclient", "resources/templates/h2o.py")
diff --git a/lib/python/bailoclient/model_handlers/model_functions/keras.py b/lib/python/bailoclient/model_handlers/model_functions/keras.py
deleted file mode 100644
index b3f8d9eaf..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/keras.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import os
-from typing import List
-from pkg_resources import resource_filename
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.exceptions import ModelMethodNotAvailable
-from bailoclient.model_handlers import bundler, loader, template
-
-
-@bundler(flavour=ModelFlavour.KERAS)
-def keras_bundler(model, output_path: str, code_paths: List[str]):
- """Bundle a Keras model with MLflow
-
- Args:
- model (Keras model): The Keras model
- output_path: Path to export the model to
- code_paths: List of additional code paths
- """
- from mlflow.keras import save_model
-
- save_model(model, path=output_path, code_paths=code_paths)
-
- model_binary = os.path.join(output_path, "data", "model.h5")
- mlflow_files = [
- os.path.join(output_path, "data", "pickle_module_info.txt"),
- os.path.join(output_path, "MLmodel"),
- ]
-
- return model_binary, mlflow_files
-
-
-@loader(flavour=ModelFlavour.KERAS)
-def keras_loader(model_path: str):
- raise ModelMethodNotAvailable(
- "The model loader function has not yet been implemented for Keras models"
- )
-
-
-@template(flavour=ModelFlavour.KERAS)
-def keras_template():
- """Get the Keras model.py template
-
- Returns:
- str: Path to Keras model.py template
- """
- return resource_filename("bailoclient", "resources/templates/keras.py")
diff --git a/lib/python/bailoclient/model_handlers/model_functions/lightgbm.py b/lib/python/bailoclient/model_handlers/model_functions/lightgbm.py
deleted file mode 100644
index f842dcb88..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/lightgbm.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import os
-from typing import List
-from pkg_resources import resource_filename
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.exceptions import ModelMethodNotAvailable
-from bailoclient.model_handlers import bundler, loader, template
-
-
-@bundler(flavour=ModelFlavour.LIGHTGBM)
-def lightgbm_bundler(model, output_path: str, code_paths: List[str]):
- """Bundle a LightGBM model with MLflow
-
- Args:
- model (LightGBM model): The LightGBM model
- output_path: Path to export the model to
- code_paths: List of additional code paths
- """
- from mlflow.lightgbm import save_model
-
- save_model(model, path=output_path, code_paths=code_paths)
-
- model_binary = os.path.join(output_path, "data", "model.lgb")
- mlflow_files = [
- os.path.join(output_path, "data", "pickle_module_info.txt"),
- os.path.join(output_path, "MLmodel"),
- ]
-
- return model_binary, mlflow_files
-
-
-@loader(flavour=ModelFlavour.LIGHTGBM)
-def lightgbm_loader(model_path: str):
- raise ModelMethodNotAvailable(
- "The model loader function has not yet been implemented for LightGBM models"
- )
-
-
-@template(flavour=ModelFlavour.LIGHTGBM)
-def lightgbm_template():
- """Get the LightGBM model.py template
-
- Returns:
- str: Path to LightGBM model.py template
- """
- return resource_filename("bailoclient", "resources/templates/lightgbm.py")
diff --git a/lib/python/bailoclient/model_handlers/model_functions/onnx.py b/lib/python/bailoclient/model_handlers/model_functions/onnx.py
deleted file mode 100644
index 5ea0ba21b..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/onnx.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import os
-from typing import List
-from pkg_resources import resource_filename
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.exceptions import ModelMethodNotAvailable
-from bailoclient.model_handlers import bundler, loader, template
-
-
-@bundler(flavour=ModelFlavour.ONNX)
-def onnx_bundler(model, output_path: str, code_paths: List[str]):
- """Bundle a ONNX model with MLflow
-
- Args:
- model (ONNX model): The ONNX model
- output_path: Path to export the model to
- code_paths: List of additional code paths
- """
- from mlflow.onnx import save_model
-
- save_model(model, path=output_path, code_paths=code_paths)
-
- model_binary = os.path.join(output_path, "data", "model.onnx")
- mlflow_files = [
- os.path.join(output_path, "data", "pickle_module_info.txt"),
- os.path.join(output_path, "MLmodel"),
- ]
-
- return model_binary, mlflow_files
-
-
-@loader(flavour=ModelFlavour.ONNX)
-def onnx_loader(model_path: str):
- raise ModelMethodNotAvailable(
- "The model loader function has not yet been implemented for ONNX models"
- )
-
-
-@template(flavour=ModelFlavour.ONNX)
-def onnx_template():
- """Get the ONNX model.py template
-
- Returns:
- str: Path to ONNX model.py template
- """
- return resource_filename("bailoclient", "resources/templates/onnx.py")
diff --git a/lib/python/bailoclient/model_handlers/model_functions/prophet.py b/lib/python/bailoclient/model_handlers/model_functions/prophet.py
deleted file mode 100644
index 937af5203..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/prophet.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import os
-from typing import List
-from pkg_resources import resource_filename
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.exceptions import ModelMethodNotAvailable
-from bailoclient.model_handlers import bundler, loader, template
-
-
-@bundler(flavour=ModelFlavour.PROPHET)
-def prophet_bundler(model, output_path: str, code_paths: List[str]):
- """Bundle a Prophet model with MLflow
-
- Args:
- model (Prophet model): The Prophet model
- output_path: Path to export the model to
- code_paths: List of additional code paths
- """
- from mlflow.prophet import save_model
-
- save_model(model, path=output_path, code_paths=code_paths)
-
- model_binary = os.path.join(output_path, "data", "model.pr")
- mlflow_files = [
- os.path.join(output_path, "data", "pickle_module_info.txt"),
- os.path.join(output_path, "MLmodel"),
- ]
-
- return model_binary, mlflow_files
-
-
-@loader(flavour=ModelFlavour.PROPHET)
-def prophet_loader(model_path: str):
- raise ModelMethodNotAvailable(
- "The model loader function has not yet been implemented for Prophet models"
- )
-
-
-@template(flavour=ModelFlavour.PROPHET)
-def prophet_template():
- """Get the Prophet model.py template
-
- Returns:
- str: Path to Prophet model.py template
- """
- return resource_filename("bailoclient", "resources/templates/prophet.py")
diff --git a/lib/python/bailoclient/model_handlers/model_functions/pytorch.py b/lib/python/bailoclient/model_handlers/model_functions/pytorch.py
deleted file mode 100644
index ad585135d..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/pytorch.py
+++ /dev/null
@@ -1,65 +0,0 @@
-import os
-from typing import List
-from pkg_resources import resource_filename
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.model_handlers import bundler, loader, template
-
-
-@bundler(flavour=ModelFlavour.PYTORCH)
-def pytorch_bundler(model, output_path: str, code_paths: List[str]):
- """Bundle a Pytorch model with MLflow
-
- Args:
- model (Pytorch model): The Pytorch model
- output_path: Path to export the model to
- code_paths: List of additional code paths
-
- Returns:
- Tuple(str, List[str]): Path to saved model binary, paths to additional MLflow files to
- bundle with the model files.
- """
- from mlflow.pytorch import save_model
-
- save_model(model, path=output_path, code_paths=code_paths)
-
- model_binary = os.path.join(output_path, "data", "model.pth")
- mlflow_files = [
- os.path.join(output_path, "data", "pickle_module_info.txt"),
- os.path.join(output_path, "MLmodel"),
- ]
-
- return model_binary, mlflow_files
-
-
-@loader(flavour=ModelFlavour.PYTORCH)
-def pytorch_loader(model_path: str):
- """Load model with Pytorch
-
- Args:
- model_path (str): Path to either the model.pth file
- or the MLflow model directory
-
- Returns:
- Pytorch model: Loaded Pytorch model
- """
-
- if os.path.isfile(model_path):
- import torch
-
- return torch.load(model_path)
-
- if os.path.isdir(model_path):
- from mlflow.pytorch import load_model
-
- return load_model(model_path)
-
-
-@template(flavour=ModelFlavour.PYTORCH)
-def pytorch_template():
- """Get the Pytorch model.py template
-
- Returns:
- str: Path to Pytorch model template
- """
- return resource_filename("bailoclient", "resources/templates/pytorch.py")
diff --git a/lib/python/bailoclient/model_handlers/model_functions/sklearn.py b/lib/python/bailoclient/model_handlers/model_functions/sklearn.py
deleted file mode 100644
index 5dc091dd9..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/sklearn.py
+++ /dev/null
@@ -1,57 +0,0 @@
-import os
-from typing import List
-from pkg_resources import resource_filename
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.model_handlers import bundler, loader, template
-
-
-@bundler(flavour=ModelFlavour.SKLEARN)
-def sklearn_bundler(model, output_path: str, code_paths: List[str]):
- """Bundle a sklearn model with MLflow
-
- Args:
- model (sklearn model): The sklearn model
- output_path: Path to export the model to
- code_paths: List of additional code paths
- """
- model_binary = os.path.join(output_path, "model.pkl")
-
- try:
- from mlflow.sklearn import save_model
-
- save_model(model, path=output_path, code_paths=code_paths)
-
- mlflow_files = [
- os.path.join(output_path, "MLmodel"),
- ]
-
- return model_binary, mlflow_files
-
- except ModuleNotFoundError:
- import pickle
-
- os.makedirs(f"{output_path}", exist_ok=True)
-
- with open(model_binary, "wb") as f:
- pickle.dump(model, f)
-
- return (model_binary, [])
-
-
-@loader(flavour=ModelFlavour.SKLEARN)
-def sklearn_loader(model_path: str):
- import pickle
-
- with open(model_path, "rb") as model_file:
- return pickle.load(model_file)
-
-
-@template(flavour=ModelFlavour.SKLEARN)
-def sklearn_template():
- """Get the sklearn model.py template
-
- Returns:
- str: Path to sklearn model.py template
- """
- return resource_filename("bailoclient", "resources/templates/sklearn.py")
diff --git a/lib/python/bailoclient/model_handlers/model_functions/spacy.py b/lib/python/bailoclient/model_handlers/model_functions/spacy.py
deleted file mode 100644
index 096acc63b..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/spacy.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import os
-from typing import List
-from pkg_resources import resource_filename
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.exceptions import ModelMethodNotAvailable
-from bailoclient.model_handlers import bundler, loader, template
-
-
-@bundler(flavour=ModelFlavour.SPACY)
-def spacy_bundler(model, output_path: str, code_paths: List[str]):
- """Bundle a spaCy model with MLflow
-
- Args:
- model (spaCy model): The spaCy model
- output_path: Path to export the model to
- code_paths: List of additional code paths
- """
- from mlflow.spacy import save_model
-
- save_model(model, path=output_path, code_paths=code_paths)
-
- model_binary = os.path.join(output_path, "data", "model.spacy")
- mlflow_files = [
- os.path.join(output_path, "data", "pickle_module_info.txt"),
- os.path.join(output_path, "MLmodel"),
- ]
-
- return model_binary, mlflow_files
-
-
-@loader(flavour=ModelFlavour.SPACY)
-def spacy_loader(model_path: str):
- raise ModelMethodNotAvailable(
- "The model loader function has not yet been implemented for spaCy models"
- )
-
-
-@template(flavour=ModelFlavour.SPACY)
-def spacy_template():
- """Get the spaCy model.py template
-
- Returns:
- str: Path to spaCy model.py template
- """
- return resource_filename("bailoclient", "resources/templates/spacy.py")
diff --git a/lib/python/bailoclient/model_handlers/model_functions/spark.py b/lib/python/bailoclient/model_handlers/model_functions/spark.py
deleted file mode 100644
index ea64432ac..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/spark.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import os
-from typing import List
-from pkg_resources import resource_filename
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.exceptions import ModelMethodNotAvailable
-from bailoclient.model_handlers import bundler, loader, template
-
-
-@bundler(flavour=ModelFlavour.SPARK)
-def spark_bundler(model, output_path: str, code_paths: List[str]):
- """Bundle a Spark model with MLflow
-
- Args:
- model (Spark model): The Spark model
- output_path: Path to export the model to
- code_paths: List of additional code paths
- """
- from mlflow.spark import save_model
-
- save_model(model, path=output_path, code_paths=code_paths)
-
- model_binary = os.path.join(output_path, "data", "sparkml")
- mlflow_files = [
- os.path.join(output_path, "data", "pickle_module_info.txt"),
- os.path.join(output_path, "MLmodel"),
- ]
-
- return model_binary, mlflow_files
-
-
-@loader(flavour=ModelFlavour.SPARK)
-def spark_loader(model_path: str):
- raise ModelMethodNotAvailable(
- "The model loader function has not yet been implemented for Spark models"
- )
-
-
-@template(flavour=ModelFlavour.SPARK)
-def spark_template():
- """Get the Spark model.py template
-
- Returns:
- str: Path to Spark model.py template
- """
- return resource_filename("bailoclient", "resources/templates/spark.py")
diff --git a/lib/python/bailoclient/model_handlers/model_functions/statsmodels.py b/lib/python/bailoclient/model_handlers/model_functions/statsmodels.py
deleted file mode 100644
index f61e297d3..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/statsmodels.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import os
-from typing import List
-from pkg_resources import resource_filename
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.exceptions import ModelMethodNotAvailable
-from bailoclient.model_handlers import bundler, loader, template
-
-
-@bundler(flavour=ModelFlavour.STATSMODELS)
-def statsmodels_bundler(model, output_path: str, code_paths: List[str]):
- """Bundle a statsmodels model with MLflow
-
- Args:
- model (statsmodels model): The statsmodels model
- output_path: Path to export the model to
- code_paths: List of additional code paths
- """
- from mlflow.statsmodels import save_model
-
- save_model(model, path=output_path, code_paths=code_paths)
-
- model_binary = os.path.join(output_path, "data", "model.statsmodels")
- mlflow_files = [
- os.path.join(output_path, "data", "pickle_module_info.txt"),
- os.path.join(output_path, "MLmodel"),
- ]
-
- return model_binary, mlflow_files
-
-
-@loader(flavour=ModelFlavour.STATSMODELS)
-def statsmodels_loader(model_path: str):
- raise ModelMethodNotAvailable(
- "The model loader function has not yet been implemented for statsmodels models"
- )
-
-
-@template(flavour=ModelFlavour.STATSMODELS)
-def statsmodels_template():
- """Get the statsmodels model.py template
-
- Returns:
- str: Path to statsmodels model.py template
- """
- return resource_filename("bailoclient", "resources/templates/statsmodels.py")
diff --git a/lib/python/bailoclient/model_handlers/model_functions/tensorflow.py b/lib/python/bailoclient/model_handlers/model_functions/tensorflow.py
deleted file mode 100644
index 60ffd6da9..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/tensorflow.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import os
-from typing import List
-from pkg_resources import resource_filename
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.exceptions import ModelMethodNotAvailable
-from bailoclient.model_handlers import bundler, loader, template
-
-
-@bundler(flavour=ModelFlavour.TENSORFLOW)
-def tensorflow_bundler(model, output_path: str, code_paths: List[str]):
- """Bundle a TensorFlow model with MLflow
-
- Args:
- model (TensorFlow model): The TensorFlow model
- output_path: Path to export the model to
- code_paths: List of additional code paths
- """
- from mlflow.tensorflow import save_model
-
- save_model(model, path=output_path, code_paths=code_paths)
-
- model_binary = os.path.join(output_path, "data", "model")
- mlflow_files = [
- os.path.join(output_path, "data", "pickle_module_info.txt"),
- os.path.join(output_path, "MLmodel"),
- ]
-
- return model_binary, mlflow_files
-
-
-@loader(flavour=ModelFlavour.TENSORFLOW)
-def tensorflow_loader(model_path: str):
- raise ModelMethodNotAvailable(
- "The model loader function has not yet been implemented for TensorFlow models"
- )
-
-
-@template(flavour=ModelFlavour.TENSORFLOW)
-def tensorflow_template():
- """Get the TensorFlow model.py template
-
- Returns:
- str: Path to TensorFlow model.py template
- """
- return resource_filename("bailoclient", "resources/templates/tensorflow.py")
diff --git a/lib/python/bailoclient/model_handlers/model_functions/xgboost.py b/lib/python/bailoclient/model_handlers/model_functions/xgboost.py
deleted file mode 100644
index 7df4d20ce..000000000
--- a/lib/python/bailoclient/model_handlers/model_functions/xgboost.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import os
-from typing import List
-from pkg_resources import resource_filename
-
-from bailoclient.enums import ModelFlavour
-from bailoclient.exceptions import ModelMethodNotAvailable
-from bailoclient.model_handlers import bundler, loader, template
-
-
-@bundler(flavour=ModelFlavour.XGBOOST)
-def xgboost_bundler(model, output_path: str, code_paths: List[str]):
- """Bundle a XGBoost model with MLflow
-
- Args:
- model (XGBoost model): The XGBoost model
- output_path: Path to export the model to
- code_paths: List of additional code paths
- """
- from mlflow.xgboost import save_model
-
- save_model(model, path=output_path, code_paths=code_paths)
-
- model_binary = os.path.join(output_path, "data", "model.xgb")
- mlflow_files = [
- os.path.join(output_path, "data", "pickle_module_info.txt"),
- os.path.join(output_path, "MLmodel"),
- ]
-
- return model_binary, mlflow_files
-
-
-@loader(flavour=ModelFlavour.XGBOOST)
-def xgboost_loader(model_path: str):
- raise ModelMethodNotAvailable(
- "The model loader function has not yet been implemented for XGBoost models"
- )
-
-
-@template(flavour=ModelFlavour.XGBOOST)
-def xgboost_template():
- """Get the XGBoost model.py template
-
- Returns:
- str: Path to XGBoost model.py template
- """
- return resource_filename("bailoclient", "resources/templates/xgboost.py")
diff --git a/lib/python/bailoclient/model_handlers/model_loader.py b/lib/python/bailoclient/model_handlers/model_loader.py
deleted file mode 100644
index 23beebe2d..000000000
--- a/lib/python/bailoclient/model_handlers/model_loader.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from bailoclient.enums import ModelFlavour
-from bailoclient.exceptions import (
- ModelFlavourNotFound,
- ModelMethodNotAvailable,
-)
-
-
-class Loader:
- """Class for handling model loader functions"""
-
- model_loaders = {}
-
- def load_model(self, model_path: str, model_flavour: str):
- """Load a model
-
- Args:
- model_path: Path to the actual model file (e.g. './model.pth')
- model_flavour: Flavour of the model (e.g. 'torch')
-
-
- Raises:
- ModelFlavourNotRecognised: The provided model flavour isn't supported
- ModelMethodNotAvailable: The model flavour is supported but the loader function hasn't been implemented
-
- Returns:
- Model: The loaded model
- """
-
- if model_flavour not in ModelFlavour:
- raise ModelFlavourNotFound(
- f"The model flavour {model_flavour} was not found. It may be an unsupported model type."
- )
-
- try:
- loader_func = self.model_loaders[model_flavour]
-
- except KeyError:
- raise ModelMethodNotAvailable(
- "Model loader has not yet been implemented for this model type"
- ) from None
-
- return loader_func(model_path)
diff --git a/lib/python/bailoclient/model_handlers/registry.py b/lib/python/bailoclient/model_handlers/registry.py
deleted file mode 100644
index dee520687..000000000
--- a/lib/python/bailoclient/model_handlers/registry.py
+++ /dev/null
@@ -1,63 +0,0 @@
-from bailoclient.enums import ModelFlavour
-from .model_bundler import Bundler
-from .model_loader import Loader
-
-
-def bundler(flavour: ModelFlavour):
- """Bundler decorator for registering a model bundler
-
- Args:
- flavour: Model flavour
- """
-
- def register_bundler(func):
- """Register the model bundler function to the bundler class
-
- Args:
- func (Callable): Function to register
- """
- Bundler.bundler_functions[flavour.value] = func
-
- return func
-
- return register_bundler
-
-
-def loader(flavour: ModelFlavour):
- """Loader decorator for registering a model loader
-
- Args:
- flavour (ModelFlavour): Model flavour
- """
-
- def register_loader(func):
- """Register the model loader function to the loader class
-
- Args:
- func (Callable): Function to register
- """
- Loader.model_loaders[flavour.value] = func
-
- return func
-
- return register_loader
-
-
-def template(flavour: ModelFlavour):
- """Template decorator for registering a model template
-
- Args:
- flavour (ModelFlavour): Model flavour
- """
-
- def register_template(func):
- """Register the model.py template code to the bundler class
-
- Args:
- func (Callable): Function to register (function should return template path)
- """
- Bundler.model_py_templates[flavour.value] = func()
-
- return func()
-
- return register_template
diff --git a/lib/python/bailoclient/models/__init__.py b/lib/python/bailoclient/models/__init__.py
deleted file mode 100644
index 78fbc3fca..000000000
--- a/lib/python/bailoclient/models/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-"""Models"""
-
-from .model import Model
-from .user import User
diff --git a/lib/python/bailoclient/models/base.py b/lib/python/bailoclient/models/base.py
deleted file mode 100644
index f8b5a71ea..000000000
--- a/lib/python/bailoclient/models/base.py
+++ /dev/null
@@ -1,51 +0,0 @@
-""" Bailo base class """
-import json
-from copy import deepcopy
-from typing import Optional
-
-import munch
-
-
-class BailoBase(munch.AutoMunch):
- """Bailo base class"""
-
- def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
- for key, value in self.items():
- super().__setattr__(
- key, value
- ) # AutoMunch converts dict values upon setattr
-
- def _get_self_without_id(self):
- temp = deepcopy(self)
- if "_id" in temp:
- del temp["_id"]
- return temp
-
- def __str__(self) -> str:
- return munch.toJSON(self._get_self_without_id())
-
- def __dir__(self):
- vals = set(list(munch.iterkeys(self)))
- if "_id" in vals:
- vals.remove("_id")
- vals.add("display")
- vals.add("list_fields")
- return list(vals)
-
- def display(self, to_screen: bool = True):
- """Display the pretty JSON of the class details
-
- Args:
- to_screen: Print prettified JSON if True. Defaults to True.
-
- Returns:
- str: prettified JSON
- """
- pretty_json = json.dumps(self._get_self_without_id(), indent=4)
-
- if to_screen:
- print(pretty_json)
- return None
-
- return pretty_json
diff --git a/lib/python/bailoclient/models/model.py b/lib/python/bailoclient/models/model.py
deleted file mode 100644
index aaf6c0b41..000000000
--- a/lib/python/bailoclient/models/model.py
+++ /dev/null
@@ -1,73 +0,0 @@
-"""Model card"""
-from typing import List
-
-import jsonschema
-from bailoclient.exceptions import ModelSchemaMissing
-
-from .base import BailoBase
-
-
-class ValidationError:
- """Validation error"""
-
- def __init__(self, field, description):
- self.field = field
- self.description = description
-
- def __repr__(self):
- return f""
-
-
-class ValidationResult:
- """Results of validation (if valid and any errors)"""
-
- def __init__(self, errors: List[ValidationError]):
- self.errors = errors
- self.is_valid = not self.errors
-
- def __repr__(self):
- return f""
-
-
-class Model(BailoBase):
- """Model card class"""
-
- def __init__(self, *args, **kwargs):
- if "_schema" not in kwargs:
- raise ModelSchemaMissing("Must provide a value for _schema")
-
- super().__init__(*args, **kwargs)
-
- def __dir__(self):
- vals = set(super().__dir__())
- vals.add("validate")
- return list(vals)
-
- @property
- def schema(self):
- """Get model schema
-
- Returns:
- dict: model schema
- """
- return self._schema
-
- def validate(self) -> ValidationResult:
- """Validate the model card
-
- Raises:
- ModelSchemaMissing: Model must have a schema
-
- Returns:
- ValidationResult: Results of validation process, including
- whether the schema is valid and any errors
- """
-
- validator = jsonschema.Draft7Validator(
- self._schema, format_checker=jsonschema.FormatChecker()
- )
- errors = validator.iter_errors(self)
- ret_err = []
- for error in errors:
- ret_err.append(ValidationError(error.path, error.message))
- return ValidationResult(ret_err)
diff --git a/lib/python/bailoclient/models/user.py b/lib/python/bailoclient/models/user.py
deleted file mode 100644
index 773d8a321..000000000
--- a/lib/python/bailoclient/models/user.py
+++ /dev/null
@@ -1,7 +0,0 @@
-"""Bailo user"""
-
-from .base import BailoBase
-
-
-class User(BailoBase):
- """Bailo user class"""
diff --git a/lib/python/bailoclient/resources/minimal_binary.zip b/lib/python/bailoclient/resources/minimal_binary.zip
deleted file mode 100644
index 582bf4c5f..000000000
Binary files a/lib/python/bailoclient/resources/minimal_binary.zip and /dev/null differ
diff --git a/lib/python/bailoclient/resources/minimal_code.zip b/lib/python/bailoclient/resources/minimal_code.zip
deleted file mode 100644
index 57443f5fb..000000000
Binary files a/lib/python/bailoclient/resources/minimal_code.zip and /dev/null differ
diff --git a/lib/python/bailoclient/resources/minimal_deployment_metadata.json b/lib/python/bailoclient/resources/minimal_deployment_metadata.json
deleted file mode 100644
index 669fac423..000000000
--- a/lib/python/bailoclient/resources/minimal_deployment_metadata.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
- "schemaRef": "/Minimal/Deployment/v6",
- "highLevelDetails": {
- "name": "Test Deployment",
- "endDate": {
- "hasEndDate": true,
- "date": "2022-11-16"
- },
- "modelID": "test-model-abcde"
- },
- "contacts": {
- "owner": [
- {
- "kind": "user",
- "id": "user"
- }
- ]
- }
-}
diff --git a/lib/python/bailoclient/resources/minimal_metadata.json b/lib/python/bailoclient/resources/minimal_metadata.json
deleted file mode 100644
index 89720d8ad..000000000
--- a/lib/python/bailoclient/resources/minimal_metadata.json
+++ /dev/null
@@ -1,34 +0,0 @@
-{
- "schemaRef": "/Minimal/General/v10",
- "highLevelDetails": {
- "tags": ["Test"],
- "name": "Minimal Model for Testing",
- "modelInASentence": "This \"model\" reverses its input.",
- "modelOverview": "This \"model\" reverses its input. It is a tiny model for testing Bailo functionality.",
- "modelCardVersion": "v1.0"
- },
- "contacts": {
- "uploader": [
- {
- "kind": "user",
- "id": "user"
- }
- ],
- "reviewer": [
- {
- "kind": "user",
- "id": "user"
- }
- ],
- "manager": [
- {
- "kind": "user",
- "id": "user"
- }
- ]
- },
- "buildOptions": {
- "uploadType": "Code and binaries",
- "seldonVersion": "seldonio/seldon-core-s2i-python37:1.10.0"
- }
-}
diff --git a/lib/python/bailoclient/resources/templates/basemodel/__init__.py b/lib/python/bailoclient/resources/templates/basemodel/__init__.py
deleted file mode 100644
index 4695000db..000000000
--- a/lib/python/bailoclient/resources/templates/basemodel/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .basemodel import BaseModel
diff --git a/lib/python/bailoclient/resources/templates/basemodel/basemodel.py b/lib/python/bailoclient/resources/templates/basemodel/basemodel.py
deleted file mode 100644
index 25ef24d12..000000000
--- a/lib/python/bailoclient/resources/templates/basemodel/basemodel.py
+++ /dev/null
@@ -1,61 +0,0 @@
-from abc import ABC, abstractmethod
-
-
-class BaseModel(ABC):
- """
- The BaseModel class provides an abstract template for model contributors.
- Models must provide a predict method but do not have to provide metrics or metadata
- """
-
- @abstractmethod
- def __init__(self):
- """
- The model should be loaded here in the Model sub-class generated
- from the BaseModel abstract class
-
- Example:
- self.model = load_model("model")
- """
- super().__init__()
-
- @abstractmethod
- def predict(self, input, features_names):
- """
- Provides a model prediction for a given input and set of feature names
-
- Args:
- input: Prediction input containing a data component
- feature_names: Optional set of feature names
-
- Returns:
- JSON serialisable numpy array, list of values, string or bytes
-
- Example:
- data = input["data"]
- result = self.model.predict(data)
- return result
- """
- pass
-
- def metrics(self):
- """
- Optional method for adding additional metrics
- :return:
-
- Example:
- return an array of metrics tuples
- metrics = [{"type": "COUNTER", "key": "metric_1", "value": 1}]
- return metrics
- """
- pass
-
- def metadata(self):
- """
- Optional metadata method.
- :return:
-
- Example:
- meta = {"field": "value"}
- return meta
- """
- pass
diff --git a/lib/python/bailoclient/resources/templates/catboost.py b/lib/python/bailoclient/resources/templates/catboost.py
deleted file mode 100644
index 246548915..000000000
--- a/lib/python/bailoclient/resources/templates/catboost.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from .basemodel import BaseModel
-
-
-class CatboostModel(BaseModel):
- def __init__(self, cb_model):
- self.cb_model = cb_model
-
- def predict(self, dataframe):
- return self.cb_model.predict(dataframe)
diff --git a/lib/python/bailoclient/resources/templates/fastai.py b/lib/python/bailoclient/resources/templates/fastai.py
deleted file mode 100644
index b2f4ee521..000000000
--- a/lib/python/bailoclient/resources/templates/fastai.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from .basemodel import BaseModel
-
-
-class FastaiModel(BaseModel):
- def __init__(self, learner):
- self.learner = learner
-
- def predict(self, dataframe):
- import pandas as pd
- import numpy as np
-
- dl = self.learner.dls.test_dl(dataframe)
- preds, _ = self.learner.get_preds(dl=dl)
-
- return pd.Series(map(np.array, preds.numpy())).to_frame("predictions")
diff --git a/lib/python/bailoclient/resources/templates/gluon.py b/lib/python/bailoclient/resources/templates/gluon.py
deleted file mode 100644
index b11ff6b68..000000000
--- a/lib/python/bailoclient/resources/templates/gluon.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from .basemodel import BaseModel
-
-
-class GluonModel(BaseModel):
- def __init__(self, gluon_model):
- self.gluon_model = gluon_model
-
- def predict(self, data):
- """
- Args:
- data: Either a pandas DataFrame or a numpy array containing input array values.
- If the input is a DataFrame, it will be converted to an array first by a
- `ndarray = df.values`.
- Returns:
- Model predictions. If the input is a pandas.DataFrame, the predictions are returned
- in a pandas.DataFrame. If the input is a numpy array, the predictions are returned
- as either a numpy.ndarray or a plain list for hybrid models.
- """
- import mxnet as mx
- import numpy as np
- import pandas as pd
-
- if isinstance(data, pd.DataFrame):
- ndarray = mx.nd.array(data.values)
- preds = self.gluon_model(ndarray)
-
- if isinstance(preds, mx.ndarray.ndarray.NDArray):
- preds = preds.asnumpy()
-
- return pd.DataFrame(preds)
-
- elif isinstance(data, np.ndarray):
- ndarray = mx.nd.array(data)
- preds = self.gluon_model(ndarray)
-
- if isinstance(preds, mx.ndarray.ndarray.NDArray):
- preds = preds.asnumpy()
-
- return preds
-
- else:
- raise TypeError("Input data should be pandas.DataFrame or numpy.ndarray")
diff --git a/lib/python/bailoclient/resources/templates/h2o.py b/lib/python/bailoclient/resources/templates/h2o.py
deleted file mode 100644
index 96ed63de8..000000000
--- a/lib/python/bailoclient/resources/templates/h2o.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from .basemodel import BaseModel
-
-
-class H2OModel(BaseModel):
- def __init__(self, h2o_model):
- self.h2o_model = h2o_model
-
- def predict(self, dataframe):
- import h2o
-
- predicted = self.h2o_model.predict(h2o.H2OFrame(dataframe)).as_data_frame()
- predicted.index = dataframe.index
-
- return predicted
diff --git a/lib/python/bailoclient/resources/templates/keras.py b/lib/python/bailoclient/resources/templates/keras.py
deleted file mode 100644
index ed20cc34d..000000000
--- a/lib/python/bailoclient/resources/templates/keras.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from .basemodel import BaseModel
-
-
-class KerasModel(BaseModel):
- def __init__(self, keras_model, graph, sess):
- self.keras_model = keras_model
- self._graph = graph
- self._sess = sess
-
- def predict(self, data):
- import pandas as pd
-
- def _predict(data):
- if isinstance(data, pd.DataFrame):
- predicted = pd.DataFrame(self.keras_model.predict(data.values))
- predicted.index = data.index
-
- else:
- predicted = self.keras_model.predict(data)
-
- return predicted
-
- # In TensorFlow < 2.0, we use a graph and session to predict
- if self._graph is not None:
- with self._graph.as_default():
- with self._sess.as_default():
- predicted = _predict(data)
-
- # In TensorFlow >= 2.0, we do not use a graph and session to predict
- else:
- predicted = _predict(data)
-
- return predicted
diff --git a/lib/python/bailoclient/resources/templates/lightgbm.py b/lib/python/bailoclient/resources/templates/lightgbm.py
deleted file mode 100644
index e8880bedf..000000000
--- a/lib/python/bailoclient/resources/templates/lightgbm.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from .basemodel import BaseModel
-
-
-class LGBModel(BaseModel):
- def __init__(self, lgb_model):
- self.lgb_model = lgb_model
-
- def predict(self, dataframe):
- return self.lgb_model.predict(dataframe)
diff --git a/lib/python/bailoclient/resources/templates/onnx.py b/lib/python/bailoclient/resources/templates/onnx.py
deleted file mode 100644
index cd2f83491..000000000
--- a/lib/python/bailoclient/resources/templates/onnx.py
+++ /dev/null
@@ -1,150 +0,0 @@
-from .basemodel import BaseModel
-
-
-FLAVOR_NAME = "onnx"
-ONNX_EXECUTION_PROVIDERS = ["CUDAExecutionProvider", "CPUExecutionProvider"]
-
-
-class OnnxModel(BaseModel):
- def __init__(self, path):
- import onnxruntime
- import os
- import numpy as np
- from pathlib import Path
- from mlflow.models import Model
- from mlflow.models.model import MLMODEL_FILE_NAME
-
- # Get the model meta data from the MLModel yaml file which may contain the providers
- # specification.
- local_path = str(Path(path).parent)
- model_meta = Model.load(os.path.join(local_path, MLMODEL_FILE_NAME))
-
- # Check if the MLModel config has the providers meta data
- if "providers" in model_meta.flavors.get(FLAVOR_NAME).keys():
- providers = model_meta.flavors.get(FLAVOR_NAME)["providers"]
- # If not, then default to the predefined list.
- else:
- providers = ONNX_EXECUTION_PROVIDERS
-
- # NOTE: Some distributions of onnxruntime require the specification of the providers
- # argument on calling. E.g. onnxruntime-gpu. The package import call does not differentiate
- # which architecture specific version has been installed, as all are imported with
- # onnxruntime. onnxruntime documentation says that from v1.9.0 some distributions require
- # the providers list to be provided on calling an InferenceSession. Therefore the try
- # catch structure below attempts to create an inference session with just the model path
- # as pre v1.9.0. If that fails, it will use the providers list call.
- # At the moment this is just CUDA and CPU, and probably should be expanded.
- # A method of user customization has been provided by adding a variable in the save_model()
- # function, which allows the ability to pass the list of execution providers via a
- # optional argument e.g.
- #
- # mlflow.onnx.save_model(..., providers=['CUDAExecutionProvider'...])
- #
- # For details of the execution providers construct of onnxruntime, see:
- # https://onnxruntime.ai/docs/execution-providers/
- #
- # For a information on how execution providers are used with onnxruntime InferenceSession,
- # see the API page below:
- # https://onnxruntime.ai/docs/api/python/api_summary.html#id8
- #
-
- try:
- self.rt = onnxruntime.InferenceSession(path)
- except ValueError:
- self.rt = onnxruntime.InferenceSession(path, providers=providers)
-
- assert len(self.rt.get_inputs()) >= 1
- self.inputs = [(inp.name, inp.type) for inp in self.rt.get_inputs()]
- self.output_names = [outp.name for outp in self.rt.get_outputs()]
-
- def _cast_float64_to_float32(self, feeds):
- for input_name, input_type in self.inputs:
- if input_type == "tensor(float)":
- feed = feeds.get(input_name)
- if feed is not None and feed.dtype == np.float64:
- feeds[input_name] = feed.astype(np.float32)
- return feeds
-
- def predict(self, data):
- """
-
- Args:
- data: Either a pandas DataFrame, numpy.ndarray or a dictionary.
-
- Dictionary input is expected to be a valid ONNX model feed dictionary.
-
- Numpy array input is supported iff the model has a single tensor input and is
- converted into an ONNX feed dictionary with the appropriate key.
-
- Pandas DataFrame is converted to ONNX inputs as follows:
- - If the underlying ONNX model only defines a *single* input tensor, the
- DataFrame's values are converted to a NumPy array representation using the
- `DataFrame.values()
- `_ method.
- - If the underlying ONNX model defines *multiple* input tensors, each column
- of the DataFrame is converted to a NumPy array representation.
-
- For more information about the ONNX Runtime, see
- ``_.
- Returns:
- Model predictions. If the input is a pandas.DataFrame, the predictions are returned
- in a pandas.DataFrame. If the input is a numpy array or a dictionary the
- predictions are returned in a dictionary.
- """
- import numpy as np
- import pandas as pd
-
- if isinstance(data, dict):
- feed_dict = data
- elif isinstance(data, np.ndarray):
- # NB: We do allow scoring with a single tensor (ndarray) in order to be compatible with
- # supported pyfunc inputs iff the model has a single input. The passed tensor is
- # assumed to be the first input.
- if len(self.inputs) != 1:
- inputs = [x[0] for x in self.inputs]
- raise Exception(
- "Unable to map numpy array input to the expected model "
- "input. "
- "Numpy arrays can only be used as input for MLflow ONNX "
- "models that have a single input. This model requires "
- "{} inputs. Please pass in data as either a "
- "dictionary or a DataFrame with the following tensors"
- ": {}.".format(len(self.inputs), inputs)
- )
- feed_dict = {self.inputs[0][0]: data}
- elif isinstance(data, pd.DataFrame):
- if len(self.inputs) > 1:
- feed_dict = {name: data[name].values for (name, _) in self.inputs}
- else:
- feed_dict = {self.inputs[0][0]: data.values}
-
- else:
- raise TypeError(
- "Input should be a dictionary or a numpy array or a pandas.DataFrame, "
- "got '{}'".format(type(data))
- )
-
- # ONNXRuntime throws the following exception for some operators when the input
- # contains float64 values. Unfortunately, even if the original user-supplied input
- # did not contain float64 values, the serialization/deserialization between the
- # client and the scoring server can introduce 64-bit floats. This is being tracked in
- # https://github.com/mlflow/mlflow/issues/1286. Meanwhile, we explicitly cast the input to
- # 32-bit floats when needed. TODO: Remove explicit casting when issue #1286 is fixed.
- feed_dict = self._cast_float64_to_float32(feed_dict)
- predicted = self.rt.run(self.output_names, feed_dict)
-
- if isinstance(data, pd.DataFrame):
-
- def format_output(data):
- # Output can be list and it should be converted to a numpy array
- # https://github.com/mlflow/mlflow/issues/2499
- data = np.asarray(data)
- return data.reshape(-1)
-
- response = pd.DataFrame.from_dict(
- {c: format_output(p) for (c, p) in zip(self.output_names, predicted)}
- )
- return response
- else:
- return dict(zip(self.output_names, predicted))
diff --git a/lib/python/bailoclient/resources/templates/prophet.py b/lib/python/bailoclient/resources/templates/prophet.py
deleted file mode 100644
index 926f4bc4e..000000000
--- a/lib/python/bailoclient/resources/templates/prophet.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from .basemodel import BaseModel
-
-
-class ProphetModel(BaseModel):
- def __init__(self, pr_model):
- self.pr_model = pr_model
-
- def predict(self, dataframe):
- return self.pr_model.predict(dataframe)
diff --git a/lib/python/bailoclient/resources/templates/pytorch.py b/lib/python/bailoclient/resources/templates/pytorch.py
deleted file mode 100644
index ecf9abfdb..000000000
--- a/lib/python/bailoclient/resources/templates/pytorch.py
+++ /dev/null
@@ -1,45 +0,0 @@
-from .basemodel import BaseModel
-
-
-class PyTorchModel(BaseModel):
- """
- Wrapper class that creates a predict function such that
- predict(data: pd.DataFrame) -> model's output as pd.DataFrame (pandas DataFrame)
- """
-
- def __init__(self, pytorch_model):
- self.pytorch_model = pytorch_model
-
- def predict(self, data, device="cpu"):
- import torch
- import pandas as pd
- import numpy as np
-
- if isinstance(data, pd.DataFrame):
- inp_data = data.values.astype(np.float32)
- elif isinstance(data, np.ndarray):
- inp_data = data
- elif isinstance(data, (list, dict)):
- raise TypeError(
- "The PyTorch flavor does not support List or Dict input types. "
- "Please use a pandas.DataFrame or a numpy.ndarray"
- )
- else:
- raise TypeError("Input data should be pandas.DataFrame or numpy.ndarray")
-
- self.pytorch_model.to(device)
- self.pytorch_model.eval()
- with torch.no_grad():
- input_tensor = torch.from_numpy(inp_data).to(device)
- preds = self.pytorch_model(input_tensor)
- if not isinstance(preds, torch.Tensor):
- raise TypeError(
- "Expected PyTorch model to output a single output tensor, "
- "but got output of type '{}'".format(type(preds))
- )
- if isinstance(data, pd.DataFrame):
- predicted = pd.DataFrame(preds.numpy())
- predicted.index = data.index
- else:
- predicted = preds.numpy()
- return predicted
diff --git a/lib/python/bailoclient/resources/templates/sklearn.py b/lib/python/bailoclient/resources/templates/sklearn.py
deleted file mode 100644
index 000581d82..000000000
--- a/lib/python/bailoclient/resources/templates/sklearn.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import pickle
-
-
-class SklearnModel(object):
- """
- Model template. You can load your model parameters in __init__ from a location accessible at runtime
- """
-
- def __init__(self):
- with open("../binary/model.pickle", "rb") as model_file:
- self._model = pickle.load(model_file)
-
- def predict(self, X, feature_names):
- predictions = self._model.predict(X)
- return predictions
diff --git a/lib/python/bailoclient/resources/templates/spacy.py b/lib/python/bailoclient/resources/templates/spacy.py
deleted file mode 100644
index 61d85d609..000000000
--- a/lib/python/bailoclient/resources/templates/spacy.py
+++ /dev/null
@@ -1,31 +0,0 @@
-from .basemodel import BaseModel
-
-
-class SpacyModel(BaseModel):
- def __init__(self, spacy_model):
- self.spacy_model = spacy_model
-
- def predict(self, dataframe):
- """
- Only works for predicting using text categorizer.
- Not suitable for other pipeline components (e.g: parser)
-
- Args:
- dataframe: pandas dataframe containing texts to be categorized
- expected shape is (n_rows,1 column)
-
- Returns:
- dataframe: predictions
- """
- import pandas as pd
-
- if len(dataframe.columns) != 1:
- raise Exception("Shape of input dataframe must be (n_rows, 1column)")
-
- return pd.DataFrame(
- {
- "predictions": dataframe.iloc[:, 0].apply(
- lambda text: self.spacy_model(text).cats
- )
- }
- )
diff --git a/lib/python/bailoclient/resources/templates/spark.py b/lib/python/bailoclient/resources/templates/spark.py
deleted file mode 100644
index 08a03b70a..000000000
--- a/lib/python/bailoclient/resources/templates/spark.py
+++ /dev/null
@@ -1,105 +0,0 @@
-from .basemodel import BaseModel
-
-
-class SparkModel(BaseModel):
- """
- Wrapper around Spark MLlib PipelineModel providing interface for scoring pandas DataFrame.
- """
-
- def __init__(self, spark, spark_model):
- self.spark = spark
- self.spark_model = spark_model
-
- @staticmethod
- def _find_and_set_features_col_as_vector_if_needed(self, spark_df, spark_model):
- """
- Finds the `featuresCol` column in spark_model and
- then tries to cast that column to `vector` type.
- This method is noop if the `featuresCol` is already of type `vector`
- or if it can't be cast to `vector` type
- Note:
- If a spark ML pipeline contains a single Estimator stage, it requires
- the input dataframe to contain features column of vector type.
- But the autologging for pyspark ML casts vector column to array type
- for parity with the pd Dataframe. The following fix is required, which transforms
- that features column back to vector type so that the pipeline stages can correctly work.
- A valid scenario is if the auto-logged input example is directly used
- for prediction, which would otherwise fail without this transformation.
-
- Args:
- spark_df: Input dataframe that contains `featuresCol`
- spark_model: A pipeline model or a single transformer that contains `featuresCol` param
-
- Returns:
- A spark dataframe that contains features column of `vector` type.
- """
- from pyspark.sql.functions import udf
- from pyspark.ml.linalg import Vectors, VectorUDT
- from pyspark.sql import types as t
-
- def _find_stage_with_features_col(stage):
- if stage.hasParam("featuresCol"):
-
- def _array_to_vector(input_array):
- return Vectors.dense(input_array)
-
- array_to_vector_udf = udf(f=_array_to_vector, returnType=VectorUDT())
- features_col_name = stage.extractParamMap().get(stage.featuresCol)
- features_col_type = [
- _field
- for _field in spark_df.schema.fields
- if _field.name == features_col_name
- and _field.dataType
- in [
- t.ArrayType(t.DoubleType(), True),
- t.ArrayType(t.DoubleType(), False),
- ]
- ]
- if len(features_col_type) == 1:
- return spark_df.withColumn(
- features_col_name, array_to_vector_udf(features_col_name)
- )
- return spark_df
-
- if hasattr(spark_model, "stages"):
- for stage in reversed(spark_model.stages):
- return _find_stage_with_features_col(stage)
- return _find_stage_with_features_col(spark_model)
-
- def predict(self, pandas_df):
- """
- Generate predictions given input data in a pandas DataFrame.
-
- Args:
- pandas_df: pandas DataFrame containing input data.
-
- Returns:
- List with model predictions.
- """
- from pyspark.ml import PipelineModel
-
- spark_df = self._find_and_set_features_col_as_vector_if_needed(
- self.spark.createDataFrame(pandas_df), self.spark_model
- )
- prediction_column = "prediction"
- if isinstance(self.spark_model, PipelineModel) and self.spark_model.stages[
- -1
- ].hasParam("outputCol"):
- from pyspark.sql import SparkSession
-
- spark = SparkSession.builder.getOrCreate()
- # do a transform with an empty input DataFrame
- # to get the schema of the transformed DataFrame
- transformed_df = self.spark_model.transform(
- spark.createDataFrame([], spark_df.schema)
- )
- # Ensure prediction column doesn't already exist
- if prediction_column not in transformed_df.columns:
- # make sure predict work by default for Transformers
- self.spark_model.stages[-1].setOutputCol(prediction_column)
- return [
- x.prediction
- for x in self.spark_model.transform(spark_df)
- .select(prediction_column)
- .collect()
- ]
diff --git a/lib/python/bailoclient/resources/templates/statsmodels.py b/lib/python/bailoclient/resources/templates/statsmodels.py
deleted file mode 100644
index 28b4c9aba..000000000
--- a/lib/python/bailoclient/resources/templates/statsmodels.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from .basemodel import BaseModel
-
-
-class StatsmodelsModel(BaseModel):
- def __init__(self, statsmodels_model):
- self.statsmodels_model = statsmodels_model
-
- def predict(self, dataframe):
- from statsmodels.tsa.base.tsa_model import TimeSeriesModel
-
- model = self.statsmodels_model.model
- if isinstance(model, TimeSeriesModel):
- # Assume the inference dataframe has columns "start" and "end", and just one row
- # TODO: move this to a specific mlflow.statsmodels.tsa flavor? Time series models
- # often expect slightly different arguments to make predictions
- if dataframe.shape[0] != 1 or not (
- "start" in dataframe.columns and "end" in dataframe.columns
- ):
- raise Exception(
- "prediction dataframes for a TimeSeriesModel must have exactly one row"
- + " and include columns called start and end"
- )
-
- start_date = dataframe["start"][0]
- end_date = dataframe["end"][0]
- return self.statsmodels_model.predict(start=start_date, end=end_date)
- else:
- return self.statsmodels_model.predict(dataframe)
diff --git a/lib/python/bailoclient/resources/templates/tensorflow.py b/lib/python/bailoclient/resources/templates/tensorflow.py
deleted file mode 100644
index 2f8ab1c99..000000000
--- a/lib/python/bailoclient/resources/templates/tensorflow.py
+++ /dev/null
@@ -1,63 +0,0 @@
-from .basemodel import BaseModel
-
-
-class TensorflowModel(BaseModel):
- """
- Wrapper class that exposes a TensorFlow model for inference via a ``predict`` function such that
- ``predict(data: pandas.DataFrame) -> pandas.DataFrame``. For TensorFlow versions >= 2.0.0.
- """
-
- def __init__(self, model, infer):
- """
- Args:
- model: A Tensorflow SavedModel.
- infer: Tensorflow function returned by a saved model that is used for inference.
- """
- # Note: we need to retain the model reference in TF2Wrapper object, because the infer
- # function in tensorflow will be `ConcreteFunction` which only retains WeakRefs to the
- # variables they close over.
- # See https://www.tensorflow.org/guide/function#deleting_tfvariables_between_function_calls
- self.model = model
- self.infer = infer
-
- def predict(self, data):
- import tensorflow
- import pandas as pd
- import numpy as np
-
- feed_dict = {}
- if isinstance(data, dict):
- feed_dict = {k: tensorflow.constant(v) for k, v in data.items()}
- elif isinstance(data, pd.DataFrame):
- for df_col_name in list(data):
- # If there are multiple columns with the same name, selecting the shared name
- # from the DataFrame will result in another DataFrame containing the columns
- # with the shared name. TensorFlow cannot make eager tensors out of pandas
- # DataFrames, so we convert the DataFrame to a numpy array here.
- val = data[df_col_name]
- if isinstance(val, pd.DataFrame):
- val = val.values
- else:
- val = np.array(val.to_list())
- feed_dict[df_col_name] = tensorflow.constant(val)
- else:
- raise TypeError("Only dict and DataFrame input types are supported")
-
- raw_preds = self.infer(**feed_dict)
- pred_dict = {
- col_name: raw_preds[col_name].numpy() for col_name in raw_preds.keys()
- }
- for col in pred_dict.keys():
- # If the output tensor is not 1-dimensional
- # AND all elements have length of 1, flatten the array with `ravel()`
- if len(pred_dict[col].shape) != 1 and all(
- len(element) == 1 for element in pred_dict[col]
- ):
- pred_dict[col] = pred_dict[col].ravel()
- else:
- pred_dict[col] = pred_dict[col].tolist()
-
- if isinstance(data, dict):
- return pred_dict
- else:
- return pd.DataFrame.from_dict(data=pred_dict)
diff --git a/lib/python/bailoclient/resources/templates/xgboost.py b/lib/python/bailoclient/resources/templates/xgboost.py
deleted file mode 100644
index 0374ec136..000000000
--- a/lib/python/bailoclient/resources/templates/xgboost.py
+++ /dev/null
@@ -1,14 +0,0 @@
-from .basemodel import BaseModel
-
-
-class XGBModel(BaseModel):
- def __init__(self, xgb_model):
- self.xgb_model = xgb_model
-
- def predict(self, dataframe):
- import xgboost as xgb
-
- if isinstance(self.xgb_model, xgb.Booster):
- return self.xgb_model.predict(xgb.DMatrix(dataframe))
- else:
- return self.xgb_model.predict(dataframe)
diff --git a/lib/python/docs/Makefile b/lib/python/docs/Makefile
index d0c3cbf10..f13636a2d 100644
--- a/lib/python/docs/Makefile
+++ b/lib/python/docs/Makefile
@@ -5,8 +5,9 @@
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
-SOURCEDIR = source
-BUILDDIR = build
+SOURCEDIR = .
+BUILDDIR = _build
+BACKENDDIR = ../../../backend/python-docs
# Put it first so that "make" without argument is like "make help".
help:
@@ -18,3 +19,4 @@ help:
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+ cp -R $(BUILDDIR) $(BACKENDDIR)
\ No newline at end of file
diff --git a/lib/python-beta/docs/bailo.core.rst b/lib/python/docs/bailo.core.rst
similarity index 100%
rename from lib/python-beta/docs/bailo.core.rst
rename to lib/python/docs/bailo.core.rst
diff --git a/lib/python-beta/docs/bailo.helper.rst b/lib/python/docs/bailo.helper.rst
similarity index 100%
rename from lib/python-beta/docs/bailo.helper.rst
rename to lib/python/docs/bailo.helper.rst
diff --git a/lib/python-beta/docs/conf.py b/lib/python/docs/conf.py
similarity index 100%
rename from lib/python-beta/docs/conf.py
rename to lib/python/docs/conf.py
diff --git a/lib/python-beta/docs/favicon.png b/lib/python/docs/favicon.png
similarity index 100%
rename from lib/python-beta/docs/favicon.png
rename to lib/python/docs/favicon.png
diff --git a/lib/python-beta/docs/index.rst b/lib/python/docs/index.rst
similarity index 100%
rename from lib/python-beta/docs/index.rst
rename to lib/python/docs/index.rst
diff --git a/lib/python/docs/make.bat b/lib/python/docs/make.bat
index 747ffb7b3..0ddad078e 100644
--- a/lib/python/docs/make.bat
+++ b/lib/python/docs/make.bat
@@ -7,8 +7,9 @@ REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
-set SOURCEDIR=source
-set BUILDDIR=build
+set SOURCEDIR=.
+set BUILDDIR=_build
+set BACKENDDIR=..\..\..\backend\python-docs
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
@@ -26,6 +27,7 @@ if errorlevel 9009 (
if "%1" == "" goto help
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+Xcopy %BUILDDIR% %BACKENDDIR%
goto end
:help
diff --git a/lib/python-beta/docs/notebooks/access_requests_demo.ipynb b/lib/python/docs/notebooks/access_requests_demo.ipynb
similarity index 100%
rename from lib/python-beta/docs/notebooks/access_requests_demo.ipynb
rename to lib/python/docs/notebooks/access_requests_demo.ipynb
diff --git a/lib/python-beta/docs/notebooks/demo_file.txt b/lib/python/docs/notebooks/demo_file.txt
similarity index 100%
rename from lib/python-beta/docs/notebooks/demo_file.txt
rename to lib/python/docs/notebooks/demo_file.txt
diff --git a/lib/python-beta/docs/notebooks/models_and_releases_demo.ipynb b/lib/python/docs/notebooks/models_and_releases_demo.ipynb
similarity index 100%
rename from lib/python-beta/docs/notebooks/models_and_releases_demo.ipynb
rename to lib/python/docs/notebooks/models_and_releases_demo.ipynb
diff --git a/lib/python-beta/docs/notebooks/schemas_demo.ipynb b/lib/python/docs/notebooks/schemas_demo.ipynb
similarity index 100%
rename from lib/python-beta/docs/notebooks/schemas_demo.ipynb
rename to lib/python/docs/notebooks/schemas_demo.ipynb
diff --git a/lib/python-beta/docs/pre-commit-config.md b/lib/python/docs/pre-commit-config.md
similarity index 100%
rename from lib/python-beta/docs/pre-commit-config.md
rename to lib/python/docs/pre-commit-config.md
diff --git a/lib/python-beta/docs/pylint.md b/lib/python/docs/pylint.md
similarity index 100%
rename from lib/python-beta/docs/pylint.md
rename to lib/python/docs/pylint.md
diff --git a/lib/python-beta/docs/pyproject.md b/lib/python/docs/pyproject.md
similarity index 100%
rename from lib/python-beta/docs/pyproject.md
rename to lib/python/docs/pyproject.md
diff --git a/lib/python-beta/docs/readme_link.md b/lib/python/docs/readme_link.md
similarity index 100%
rename from lib/python-beta/docs/readme_link.md
rename to lib/python/docs/readme_link.md
diff --git a/lib/python-beta/docs/requirements.txt b/lib/python/docs/requirements.txt
similarity index 100%
rename from lib/python-beta/docs/requirements.txt
rename to lib/python/docs/requirements.txt
diff --git a/lib/python/docs/source/api.rst b/lib/python/docs/source/api.rst
deleted file mode 100644
index 438270fad..000000000
--- a/lib/python/docs/source/api.rst
+++ /dev/null
@@ -1,33 +0,0 @@
-API
-===
-
-.. module:: bailoclient
-
-
-Bailo
------
-
-.. autoclass:: bailoclient.Bailo
- :members:
- :inherited-members:
-
-
-Client
-------
-
-.. autoclass:: bailoclient.Client
- :members:
- :inherited-members:
-
-
-Config
-------
-
-.. autoclass:: bailoclient.config.BailoConfig
- :members:
-
-.. autoclass:: bailoclient.config.CognitoConfig
- :members:
-
-.. autoclass:: bailoclient.config.Pkcs12Config
- :members:
diff --git a/lib/python/docs/source/bailo_demo.ipynb b/lib/python/docs/source/bailo_demo.ipynb
deleted file mode 100644
index a170d4754..000000000
--- a/lib/python/docs/source/bailo_demo.ipynb
+++ /dev/null
@@ -1,564 +0,0 @@
-{
- "cells": [
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Using the Bailo module \n",
- "\n",
- "To connect to the API, you will need to authenticate the Bailo client. \n",
- "\n",
- "\n",
- "For Cognito authentication:\n",
- "```\n",
- "from bailoclient import Bailo, BailoConfig, CognitoConfig\n",
- "\n",
- "auth = CognitoConfig(\n",
- " username=\"username\",\n",
- " password=\"password\",\n",
- " user_pool_id=\"user-pool-id\",\n",
- " client_id=\"client-id\",\n",
- " client_secret=\"secret\",\n",
- " region=\"region\",\n",
- ")\n",
- "bailo = Bailo(\n",
- " config=BailoConfig(\n",
- " auth=auth,\n",
- " bailo_url=\"https://bailo.io\",\n",
- " ca_cert=\"path/to/ca\",\n",
- " )\n",
- ")\n",
- "```\n",
- "\n",
- "For PKI authentication:\n",
- "```\n",
- "from bailoclient import Bailo, BailoConfig, Pkcs12Config\n",
- "\n",
- "auth = Pkcs12Config(\n",
- " pkcs12_filename=\"path/to/file.pem\",\n",
- " pkcs12_password=\"password\"\n",
- ")\n",
- "bailo = Bailo(\n",
- " config=BailoConfig(\n",
- " auth=auth,\n",
- " bailo_url=\"https://bailo.io\",\n",
- " ca_cert=\"path/to/ca\",\n",
- " )\n",
- ")\n",
- "```\n",
- "\n",
- "\n",
- "If you don't need a client and just want to make use of model loading/bundling functionality:\n",
- "\n",
- "```\n",
- "from bailoclient import Bailo, BailoConfig, Pkcs12Config\n",
- "\n",
- "bailo = Bailo(\n",
- " config=BailoConfig(auth=None, bailo_url=\"none\")\n",
- ")\n",
- "```\n"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "# Example workflow\n",
- "\n",
- "This example workflow will demonstrate the capability of the Python client for interacting with Bailo, as well as its automated model bundling and loading functionality.\n",
- "\n",
- "It is assumed that you have a valid .env file configured (see the README for more information).\n",
- "\n",
- "You will need to have sklearn installed in your Python environment to generate the test model. "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# !pip install -U scikit-learn"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Create a simple model\n",
- "\n",
- "We will use sklearn to create a basic model trained on the Iris dataset. The model returns predictions as a numeric label."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from sklearn import datasets\n",
- "from sklearn.svm import SVC\n",
- "\n",
- "iris = datasets.load_iris()\n",
- "clf = SVC()\n",
- "clf.fit(iris.data, iris.target)"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Bundle the required model files\n",
- "\n",
- "Bailo's model bundling functionality can handle much of the process for you. All you need to provide is the model 'flavour' (i.e. the package that the model was produced with)\n",
- "\n",
- "* Generates the requirements.txt file based on an input directory/module/notebook file\n",
- "* Gets the corresponding model.py template (**warning** most of these are currently untested)\n",
- "* Saves your model (many of the bundler flavours available make use of MLflow to save models - you may need this installed to make use of some of the bundler functionality)\n",
- "* Organises your files into code and binary zip folders\n",
- "\n",
- "The sklearn bundler does not require MLflow and will export your model to a .pkl file for you."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "from bailoclient import Bailo, Pkcs12Config, BailoConfig\n",
- "\n",
- "bailo_url = \"...\"\n",
- "auth = Pkcs12Config(...)\n",
- "bailo = Bailo(config=BailoConfig(auth=auth, bailo_url=bailo_url))"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "To check what flavours are available for bundling/loading:"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "bailo.flavours"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "output_path = \"./sklearn_example\"\n",
- "\n",
- "bailo.bundle_model(model=clf, output_path=output_path, model_flavour=\"sklearn\")"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Upload the bundled model files\n",
- "\n",
- "To upload the model, we need to provide metadata, or the model card. \n",
- "\n",
- "There is a minimal amount of metadata that must be provided that you can look at via the Bailo client. We will set a name, description and overview for our model before uploading it via the client."
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "model_binary = f\"{output_path}/binary.zip\"\n",
- "model_code = f\"{output_path}/code.zip\"\n",
- "\n",
- "# set some of the metadata for the model\n",
- "model_metadata = bailo.minimal_metadata\n",
- "model_metadata[\"highLevelDetails\"][\"name\"] = \"sklearn model\"\n",
- "model_metadata[\"highLevelDetails\"][\"modelInASentence\"] = \"predicts iris data\"\n",
- "model_metadata[\"highLevelDetails\"][\n",
- " \"modelOverview\"\n",
- "] = \"sklearn model to predict iris data\"\n",
- "\n",
- "# upload the model\n",
- "uploaded_model = bailo.upload_model(\n",
- " metadata=model_metadata,\n",
- " binary_file=model_binary,\n",
- " code_file=model_code,\n",
- ")"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Check the model has been uploaded\n",
- "\n",
- "Use the client to retrieve all of your models - you should have a model called sklearn-model-xxxxxx"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "user_models = bailo.get_my_models()\n",
- "\n",
- "for model in user_models:\n",
- " print(model.uuid)"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Update the model with a new version of the binary\n",
- "\n",
- "As your model is developed over time you will want to upload new versions. \n",
- "\n",
- "In this case, we want to improve our model by having it return strings of the actual classification labels instead of just a numeric value for its predictions.\n",
- "\n",
- "We will update the model to do this:"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "clf = SVC()\n",
- "\n",
- "clf.fit(iris.data, iris.target_names[iris.target])"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "When we have the updated model we can zip the binary file. We'll skip the Bailo bundling step because we haven't updated any of the code files. Instead we'll create a new binary zip file with our new model pkl file"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "import pickle\n",
- "import zipfile\n",
- "import os\n",
- "\n",
- "os.makedirs(f\"{output_path}/new_binary\", exist_ok=True)\n",
- "\n",
- "with open(f\"{output_path}/new_binary/model.pkl\", \"wb\") as f:\n",
- " pickle.dump(clf, f)\n",
- "\n",
- "zipfile.ZipFile(f\"{output_path}/new_binary/binary.zip\", mode=\"w\").write(\n",
- " f\"{output_path}/new_binary/model.pkl\", arcname=\"model.pkl\"\n",
- ")"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Update the model metadata\n",
- "\n",
- "We can get the metadata from the model card we retrieved when we called bailo.get_my_models(). We can look at the metadata by calling the display function.\n",
- "\n",
- "You may also want to see which fields are accessible in the model card:\n",
- "\n",
- "```\n",
- " dir(model_card)\n",
- "```\n",
- "\n",
- "Or to look at the validation schema for the model (which should give an indication of which metadata fields are expected and what they should look like):\n",
- "```\n",
- " schema = bailo.get_model_schema(model_uuid)\n",
- "```"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "model_card = user_models[0]\n",
- "\n",
- "model_card.display()"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Update the required fields on the model card\n",
- "\n",
- "As a minimum you will need to update the model version number"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "# get model and required fields for updating (UUID and metadata)\n",
- "model_card = user_models[0]\n",
- "model_uuid = model_card.uuid\n",
- "metadata = model_card.latestVersion.metadata\n",
- "\n",
- "\n",
- "# update some of the metadata fields for the new model version\n",
- "metadata.highLevelDetails.name = f\"Updated sklearn model\"\n",
- "metadata.highLevelDetails.modelCardVersion = \"v2.0\""
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Validate the model card\n",
- "\n",
- "Check that all the fields that we have provided are valid input"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "result = model_card.validate()\n",
- "for error in result.errors:\n",
- " print(f\"{error.field}: {error.description}\")"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Push the new model version up to Bailo"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "update_resp = bailo.update_model(\n",
- " metadata=metadata,\n",
- " model_uuid=model_uuid,\n",
- " binary_file=f\"{output_path}/new_binary/binary.zip\",\n",
- " code_file=f\"{output_path}/code.zip\", # code is unchanged, but must still be provided\n",
- ")"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Check that the model has been updated\n",
- "\n",
- "Get a new list of user models and identify the most recently uploaded model. Check that the latest version of this model has the expected name"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "user_models = bailo.get_models()\n",
- "latest_model = user_models[0]\n",
- "\n",
- "latest_model.latestVersion.metadata.highLevelDetails.name"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Request a deployment\n",
- "\n",
- "To request a deployment, we have to provide metadata relating to the deployment request.\n",
- "\n",
- "You can access the minimal deployment metadata on the Bailo interface in the same way as we accessed the minimal model metadata. "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "deployment_metadata = bailo.minimal_deployment_metadata\n",
- "deployment_metadata"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "Let's edit this metadata to request a deployment for our model\n",
- "\n",
- "* Give the deployment a name\n",
- "* Give the model an end date of tomorrow (end date is not a required field)\n",
- "* Link the deployment request to the model we are requesting via the model UUID\n",
- "* Set the owner to be the current user"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "import datetime\n",
- "\n",
- "# name the deployment\n",
- "deployment_metadata[\"highLevelDetails\"][\"name\"] = \"sklearn test deployment\"\n",
- "\n",
- "# set end date to tomorrow\n",
- "end_date = str(datetime.date.today() + datetime.timedelta(days=1))\n",
- "deployment_metadata[\"highLevelDetails\"][\"endDate\"][\"date\"] = end_date\n",
- "\n",
- "# set model ID to our new model's uuid\n",
- "deployment_metadata[\"highLevelDetails\"][\"modelID\"] = model_uuid\n",
- "\n",
- "# set owner to current user id\n",
- "deployment_metadata[\"contacts\"][\"owner\"][0][\"id\"] = bailo.get_me().id\n",
- "\n",
- "deployment_metadata"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "deployment_request = bailo.request_deployment(deployment_metadata)"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Download the model files for a deployment\n",
- "\n",
- "With an **approved** deployment you can request to download the model files. \n",
- "\n",
- "To carry out this step you will need your deployment request to be approved. Deployment requests cannot be approved via the Python module. \n",
- "\n",
- "By default the download_model_files function will download both the model binary and code. To specify either binary or code download, use e.g. file_type='binary'"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "deployment_uuid = deployment_request[\"uuid\"]\n",
- "model_version = latest_model.latestVersion.metadata.highLevelDetails.modelCardVersion\n",
- "\n",
- "bailo.download_model_files(\n",
- " deployment_uuid=deployment_uuid,\n",
- " model_version=model_version,\n",
- " output_dir=f\"{output_path}/downloaded_model\",\n",
- ")"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Load the model into memory\n",
- "\n",
- "With the model files downloaded we can use the sklearn loader function to automatically load the model object into memory - we now have a sklearn SVC model loaded. "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "loaded_model = bailo.load_model(\n",
- " f\"{output_path}/downloaded_model/model.pkl\", model_flavour=\"sklearn\"\n",
- ")\n",
- "type(loaded_model)"
- ]
- },
- {
- "attachments": {},
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Run some predictions\n",
- "\n",
- "Finally, we can run some predictions on the model and see that the model binary has been updated to return the labels of the predictions. "
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": [
- "loaded_model.predict(iris.data)"
- ]
- }
- ],
- "metadata": {
- "language_info": {
- "codemirror_mode": {
- "name": "ipython",
- "version": 3
- },
- "file_extension": ".py",
- "mimetype": "text/x-python",
- "name": "python",
- "nbconvert_exporter": "python",
- "pygments_lexer": "ipython3",
- "version": "3.7.16"
- },
- "vscode": {
- "interpreter": {
- "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6"
- }
- }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/lib/python/docs/source/conf.py b/lib/python/docs/source/conf.py
deleted file mode 100644
index a394965e1..000000000
--- a/lib/python/docs/source/conf.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Configuration file for the Sphinx documentation builder.
-#
-# For the full list of built-in configuration values, see the documentation:
-# https://www.sphinx-doc.org/en/master/usage/configuration.html
-
-# -- Project information -----------------------------------------------------
-# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
-
-import os
-import sys
-
-sys.path.insert(0, os.path.abspath("../.."))
-
-import bailoclient
-
-project = "Bailo"
-copyright = "2023, GCHQ"
-author = "GCHQ"
-
-# -- General configuration ---------------------------------------------------
-# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
-
-master_doc = "index"
-extensions = [
- "sphinx.ext.autodoc", # extract docs from docstrings
- "sphinx.ext.napoleon", # google style docstring format
- "m2r2", # markdown support
- "myst_nb", # notebook support
-]
-
-autodoc_typehints = "description"
-autodoc_typehints_description_target = "all"
-nb_execution_mode = "off"
-
-
-templates_path = ["_templates"]
-exclude_patterns = []
-
-
-# -- Options for HTML output -------------------------------------------------
-# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
-
-html_theme = "alabaster"
-html_static_path = ["_static"]
diff --git a/lib/python/docs/source/index.rst b/lib/python/docs/source/index.rst
deleted file mode 100644
index 92359e79c..000000000
--- a/lib/python/docs/source/index.rst
+++ /dev/null
@@ -1,33 +0,0 @@
-Getting Started
-===============
-
-.. toctree::
- :maxdepth: 2
-
- readme
-
-
-Examples
-========
-
-.. toctree::
- :maxdepth: 2
-
- bailo_demo.ipynb
-
-
-API Documentation
-=================
-
-.. toctree::
- :maxdepth: 2
-
- api
-
-
-.. Indices and tables
-.. ==================
-
-.. * :ref:`genindex`
-.. * :ref:`modindex`
-.. * :ref:`search`
diff --git a/lib/python/docs/source/readme.rst b/lib/python/docs/source/readme.rst
deleted file mode 100644
index 3bd447c43..000000000
--- a/lib/python/docs/source/readme.rst
+++ /dev/null
@@ -1 +0,0 @@
-.. mdinclude:: ../../README.md
diff --git a/lib/python-beta/docs/vertical-white.png b/lib/python/docs/vertical-white.png
similarity index 100%
rename from lib/python-beta/docs/vertical-white.png
rename to lib/python/docs/vertical-white.png
diff --git a/lib/python/jwt-token/README.md b/lib/python/jwt-token/README.md
deleted file mode 100644
index 1ae135af0..000000000
--- a/lib/python/jwt-token/README.md
+++ /dev/null
@@ -1,21 +0,0 @@
-## Generate a JWT for connecting to BAILO
-
-In order to make REST requests to the API programmatically, you'll need to supply a JWT in order for your request to be
-authenticated.
-
-This python function will return a token to use as part of your request.
-
-username = the username of the user object in AWS Cognito password = the password of the user object app_client_id = the
-App Client Id (can be found on AWS Cognito [select 'App clients' menu on the left of the Cognito page])
-app_client_secret = the app client secret. This can be found on the same page above.
-
-How to use:
-
-```bash
-python3 generate_jwt.py
-```
-
-Please note that this functionality is a temporary solution to help provide tokens in order for programmatic requests to
-the BAILO API.
-
-Currently, the Gateway used to connect to BAILO is only directing traffic to the mlops-dev cluster.
diff --git a/lib/python/jwt-token/generate_jwt.py b/lib/python/jwt-token/generate_jwt.py
deleted file mode 100644
index 01e6e8e80..000000000
--- a/lib/python/jwt-token/generate_jwt.py
+++ /dev/null
@@ -1,29 +0,0 @@
-import sys
-import hmac, hashlib, base64
-import boto3
-
-
-def generate_auth_token(username, password, app_client_id, key):
- message = bytes(username + app_client_id, "utf-8")
- key = bytes(key, "utf-8")
- secret_hash = base64.b64encode(
- hmac.new(key, message, digestmod=hashlib.sha256).digest()
- ).decode()
-
- client = boto3.client("cognito-idp")
- token_result = client.initiate_auth(
- AuthFlow="USER_PASSWORD_AUTH",
- AuthParameters={
- "USERNAME": username,
- "PASSWORD": password,
- "SECRET_HASH": secret_hash,
- },
- ClientId=app_client_id,
- )
-
- return token_result["AuthenticationResult"]["AccessToken"]
-
-
-if __name__ == "__main__":
- token = generate_auth_token(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4])
- print(token)
diff --git a/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/meta.yaml b/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/meta.yaml
new file mode 100644
index 000000000..29c6e8b00
--- /dev/null
+++ b/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/meta.yaml
@@ -0,0 +1,15 @@
+artifact_uri: mlflow-artifacts:/0/8acb27a5fb214c87a9fbcf4b2f81d92e/artifacts
+end_time: 1706787690055
+entry_point_name: ''
+experiment_id: '0'
+lifecycle_stage: active
+run_id: 8acb27a5fb214c87a9fbcf4b2f81d92e
+run_name: judicious-doe-739
+run_uuid: 8acb27a5fb214c87a9fbcf4b2f81d92e
+source_name: ''
+source_type: 4
+source_version: ''
+start_time: 1706787689872
+status: 3
+tags: []
+user_id: ubuntu
diff --git a/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/metrics/val_loss b/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/metrics/val_loss
new file mode 100644
index 000000000..99e11849c
--- /dev/null
+++ b/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/metrics/val_loss
@@ -0,0 +1 @@
+1706787690020 0.1 0
diff --git a/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/params/lr b/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/params/lr
new file mode 100644
index 000000000..eb5a1db86
--- /dev/null
+++ b/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/params/lr
@@ -0,0 +1 @@
+0.001
\ No newline at end of file
diff --git a/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/tags/mlflow.runName b/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/tags/mlflow.runName
new file mode 100644
index 000000000..ebcab573c
--- /dev/null
+++ b/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/tags/mlflow.runName
@@ -0,0 +1 @@
+judicious-doe-739
\ No newline at end of file
diff --git a/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/tags/mlflow.source.name b/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/tags/mlflow.source.name
new file mode 100644
index 000000000..75cd866f9
--- /dev/null
+++ b/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/tags/mlflow.source.name
@@ -0,0 +1 @@
+/home/ubuntu/.local/lib/python3.10/site-packages/ipykernel_launcher.py
\ No newline at end of file
diff --git a/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/tags/mlflow.source.type b/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/tags/mlflow.source.type
new file mode 100644
index 000000000..0c2c1fe9d
--- /dev/null
+++ b/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/tags/mlflow.source.type
@@ -0,0 +1 @@
+LOCAL
\ No newline at end of file
diff --git a/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/tags/mlflow.user b/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/tags/mlflow.user
new file mode 100644
index 000000000..7d13753d7
--- /dev/null
+++ b/lib/python/mlruns/0/8acb27a5fb214c87a9fbcf4b2f81d92e/tags/mlflow.user
@@ -0,0 +1 @@
+ubuntu
\ No newline at end of file
diff --git a/lib/python/mlruns/0/meta.yaml b/lib/python/mlruns/0/meta.yaml
new file mode 100644
index 000000000..d7490dfd5
--- /dev/null
+++ b/lib/python/mlruns/0/meta.yaml
@@ -0,0 +1,6 @@
+artifact_location: mlflow-artifacts:/0
+creation_time: 1706180215508
+experiment_id: '0'
+last_update_time: 1706180215508
+lifecycle_stage: active
+name: Default
diff --git a/lib/python/notebooks/experiment_tracking_docs.ipynb b/lib/python/notebooks/experiment_tracking_docs.ipynb
new file mode 100644
index 000000000..8f148a122
--- /dev/null
+++ b/lib/python/notebooks/experiment_tracking_docs.ipynb
@@ -0,0 +1,70 @@
+{
+ "cells": [
+ {
+ "attachments": {},
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Experiment Management"
+ ]
+ },
+ {
+ "attachments": {},
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "In addition to the core concepts of **models** and **releases**, it may also be beneficial to manage model experimentation as part of your workflow. Whilst the UI and backend don't currently support experiments as a concept, you may manage these locally using the python client and publish the best experiments as releases. \n",
+ "\n",
+ "Benefits of experiment tracking with Bailo:\n",
+ "* Log and compare multiple experiment runs to select the best model for publishing to the marketplace.\n",
+ " * Include artifacts (files or images) and references to datasets.\n",
+ "* Integrate with more purpose built tracking tools such as **MLFlow Tracking** to easily convert existing experiments.\n",
+ "* Automatically write experiment run information to a model card.\n",
+ "\n",
+ "⚠ *Bailo as a service is not designed to log every experiment you do, and instead should be used for models which are ready to share or deploy.*\n",
+ "\n",
+ "\n",
+ "Read more about how to manage experiments on the python docs here."
+ ]
+ },
+ {
+ "attachments": {},
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Accessing a model"
+ ]
+ },
+ {
+ "attachments": {},
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "In order to access model artifacts (e.g. images and files), the user must submit an **access request**. Access requests are used to control access to these resources, and can be approved or have changes requested by an MSRO (or another reviewer). These access requests apply to specific models, but upon approval access will be granted to all releases belonging to said model. Below is a walkthrough of how to submit an access request using the UI.\n",
+ "\n",
+ "1. First, ensure that you have grounds to access to a model *before* submitting an access request as you will need a justification.\n",
+ "2. On the model page for your selected model, navigate to the **access requests** page.\n",
+ "3. On this page, select the **request access** button. You will be prompted to select a schema, which you should select based on your organisations rules.\n",
+ "4. Work through and fill out all required fields, then press **submit**.\n",
+ "5. The MSRO will then either accept, or request changes based on the information provided.\n",
+ "\n",
+ "⚠ **If you're an MSRO:** Please check out the approvals section to learn how you can manage reviews.\n",
+ "\n"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": "3.10.12"
+ },
+ "orig_nbformat": 4
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/lib/python-beta/pyproject.toml b/lib/python/pyproject.toml
similarity index 100%
rename from lib/python-beta/pyproject.toml
rename to lib/python/pyproject.toml
diff --git a/lib/python/pytest.ini b/lib/python/pytest.ini
deleted file mode 100644
index a7d322f40..000000000
--- a/lib/python/pytest.ini
+++ /dev/null
@@ -1,7 +0,0 @@
-# pytest.ini
-[pytest]
-minversion = 6.0
-addopts = -ra -q
-testpaths = tests
-filterwarnings=
- ignore::DeprecationWarning:Crypto
diff --git a/lib/python/requirements.txt b/lib/python/requirements.txt
deleted file mode 100644
index aefbcb6d2..000000000
--- a/lib/python/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
--e .[dev]
diff --git a/lib/python/setup.py b/lib/python/setup.py
deleted file mode 100644
index 5459bfbbc..000000000
--- a/lib/python/setup.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from setuptools import setup
-
-setup(
- name="bailoclient",
- version="0.1.0",
- description="A python client for interacting with the Bailo model management platform",
- packages=["bailoclient"],
- install_requires=[
- "boto3 <= 1.17.109",
- "jsonschema <= 3.2.0",
- "munch <= 2.5.0",
- "pipreqs <= 0.4.13",
- "pipreqsnb <= 0.2.4",
- "pycognito <= 2022.12.0",
- "pydantic <= 1.8",
- "python-dotenv <= 0.20.0",
- "PyYAML <= 6.0.1",
- "requests <= 2.25.1",
- "requests-pkcs12 <= 1.13",
- "requests-toolbelt <= 0.9.1",
- ],
- extras_require={
- "dev": [
- "black[jupyter]",
- "pre-commit",
- "pylint",
- "pytest >= 6.2",
- "sphinx >= 5.3.0",
- "m2r2 >= 0.3.1",
- "myst-nb >= 0.17.2",
- ],
- "bundling": ["mlflow"],
- },
- classifiers=[
- "Development Status :: 2 - Pre-Alpha",
- "Programming Language :: Python :: 3.6",
- "Programming Language :: Python :: 3.7",
- "Programming Language :: Python :: 3.8",
- "Programming Language :: Python :: 3.9",
- "Programming Language :: Python :: 3.10",
- ],
-)
diff --git a/lib/python-beta/src/bailo/__init__.py b/lib/python/src/bailo/__init__.py
similarity index 100%
rename from lib/python-beta/src/bailo/__init__.py
rename to lib/python/src/bailo/__init__.py
diff --git a/lib/python-beta/src/bailo/core/__init__.py b/lib/python/src/bailo/core/__init__.py
similarity index 100%
rename from lib/python-beta/src/bailo/core/__init__.py
rename to lib/python/src/bailo/core/__init__.py
diff --git a/lib/python-beta/src/bailo/core/agent.py b/lib/python/src/bailo/core/agent.py
similarity index 100%
rename from lib/python-beta/src/bailo/core/agent.py
rename to lib/python/src/bailo/core/agent.py
diff --git a/lib/python-beta/src/bailo/core/client.py b/lib/python/src/bailo/core/client.py
similarity index 100%
rename from lib/python-beta/src/bailo/core/client.py
rename to lib/python/src/bailo/core/client.py
diff --git a/lib/python-beta/src/bailo/core/enums.py b/lib/python/src/bailo/core/enums.py
similarity index 100%
rename from lib/python-beta/src/bailo/core/enums.py
rename to lib/python/src/bailo/core/enums.py
diff --git a/lib/python-beta/src/bailo/core/exceptions.py b/lib/python/src/bailo/core/exceptions.py
similarity index 100%
rename from lib/python-beta/src/bailo/core/exceptions.py
rename to lib/python/src/bailo/core/exceptions.py
diff --git a/lib/python-beta/src/bailo/core/utils.py b/lib/python/src/bailo/core/utils.py
similarity index 100%
rename from lib/python-beta/src/bailo/core/utils.py
rename to lib/python/src/bailo/core/utils.py
diff --git a/lib/python-beta/src/bailo/helper/__init__.py b/lib/python/src/bailo/helper/__init__.py
similarity index 100%
rename from lib/python-beta/src/bailo/helper/__init__.py
rename to lib/python/src/bailo/helper/__init__.py
diff --git a/lib/python-beta/src/bailo/helper/access_request.py b/lib/python/src/bailo/helper/access_request.py
similarity index 100%
rename from lib/python-beta/src/bailo/helper/access_request.py
rename to lib/python/src/bailo/helper/access_request.py
diff --git a/lib/python-beta/src/bailo/helper/model.py b/lib/python/src/bailo/helper/model.py
similarity index 100%
rename from lib/python-beta/src/bailo/helper/model.py
rename to lib/python/src/bailo/helper/model.py
diff --git a/lib/python-beta/src/bailo/helper/release.py b/lib/python/src/bailo/helper/release.py
similarity index 100%
rename from lib/python-beta/src/bailo/helper/release.py
rename to lib/python/src/bailo/helper/release.py
diff --git a/lib/python-beta/src/bailo/helper/schema.py b/lib/python/src/bailo/helper/schema.py
similarity index 100%
rename from lib/python-beta/src/bailo/helper/schema.py
rename to lib/python/src/bailo/helper/schema.py
diff --git a/lib/python/test.ipynb b/lib/python/test.ipynb
new file mode 100644
index 000000000..146849856
--- /dev/null
+++ b/lib/python/test.ipynb
@@ -0,0 +1,203 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from bailo import Model, Client, Schema, SchemaKind\n",
+ "from io import BytesIO"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "client = Client(\"http://127.0.0.1:8080\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "metrics_schema = {\n",
+ " \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n",
+ " \"type\": \"object\",\n",
+ " \"properties\": {\n",
+ " \"overview\": {\n",
+ " \"title\": \"Overview\",\n",
+ " \"description\": \"Summary of the model functionality.\",\n",
+ " \"type\": \"object\",\n",
+ " \"properties\": {\n",
+ " \"modelOverview\": {\n",
+ " \"title\": \"What does the model do?\",\n",
+ " \"description\": \"A description of what the model does.\",\n",
+ " \"type\": \"string\",\n",
+ " \"minLength\": 1,\n",
+ " \"maxLength\": 5000,\n",
+ " },\n",
+ " \"tags\": {\n",
+ " \"title\": \"Descriptive tags for the model.\",\n",
+ " \"description\": \"These tags will be searchable and will help others find this model.\",\n",
+ " \"type\": \"array\",\n",
+ " \"widget\": \"tagSelectorBeta\",\n",
+ " \"items\": {\"type\": \"string\"},\n",
+ " \"uniqueItems\": True,\n",
+ " },\n",
+ " },\n",
+ " \"required\": [],\n",
+ " \"additionalProperties\": False,\n",
+ " },\n",
+ " \"performance\": {\n",
+ " \"title\": \"Performance\",\n",
+ " \"description\": \"Performance metrics and parameters\",\n",
+ " \"type\": \"object\",\n",
+ " \"properties\": {\n",
+ " \"performanceOverview\": {\n",
+ " \"title\": \"How well does the model perform?\",\n",
+ " \"description\": \"A description of model performance\",\n",
+ " \"type\": \"string\",\n",
+ " \"minLength\": 1,\n",
+ " \"maxLength\": 5000,\n",
+ " },\n",
+ " \"metricsParameters\": {\n",
+ " \"title\": \"Metrics & Parameters\",\n",
+ " \"description\": \"Model metrics & parameters\",\n",
+ " \"type\": \"array\",\n",
+ " \"items\": {\"type\": \"object\"},\n",
+ " },\n",
+ " },\n",
+ " \"required\": [],\n",
+ " \"additionalProperties\": True\n",
+ " },\n",
+ " },\n",
+ " \"required\": [],\n",
+ " \"additionalProperties\": False,\n",
+ "}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "{'id': 'performance-schema4', 'name': 'Performance Schema 4', 'description': 'Performance Schema', 'active': True, 'hidden': False, 'kind': 'model', 'jsonSchema': {'$schema': 'http://json-schema.org/draft-07/schema#', 'type': 'object', 'properties': {'overview': {'title': 'Overview', 'description': 'Summary of the model functionality.', 'type': 'object', 'properties': {'modelOverview': {'title': 'What does the model do?', 'description': 'A description of what the model does.', 'type': 'string', 'minLength': 1, 'maxLength': 5000}, 'tags': {'title': 'Descriptive tags for the model.', 'description': 'These tags will be searchable and will help others find this model.', 'type': 'array', 'widget': 'tagSelectorBeta', 'items': {'type': 'string'}, 'uniqueItems': True}}, 'required': [], 'additionalProperties': False}, 'performance': {'title': 'Performance', 'description': 'Performance metrics and parameters', 'type': 'object', 'properties': {'performanceOverview': {'title': 'How well does the model perform?', 'description': 'A description of model performance', 'type': 'string', 'minLength': 1, 'maxLength': 5000}, 'metricsParameters': {'title': 'Metrics & Parameters', 'description': 'Model metrics & parameters', 'type': 'array', 'items': {'type': 'object'}}}, 'required': [], 'additionalProperties': True}}, 'required': [], 'additionalProperties': False}, '_id': '65d724686504994e7c357be3', 'createdAt': '2024-02-22T10:39:36.174Z', 'updatedAt': '2024-02-22T10:39:36.174Z', '__v': 0}\n"
+ ]
+ }
+ ],
+ "source": [
+ "schema = Schema.create(client=client, schema_id=\"performance-schema4\", name=\"Performance Schema 4\", description=\"Performance Schema\", kind=SchemaKind.MODEL, json_schema=metrics_schema)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "class NestedDict(dict):\n",
+ " def __getitem__(self,keytuple):\n",
+ " # if key is not a tuple then access as normal\n",
+ " if not isinstance(keytuple, tuple):\n",
+ " return super(NestedDict,self).__getitem__(keytuple)\n",
+ " d = self\n",
+ " for key in keytuple:\n",
+ " d = d[key]\n",
+ " return d\n",
+ " def __setitem__(self, keytuple, item):\n",
+ " # if key is not a tuple then access as normal\n",
+ " if not isinstance(keytuple, tuple):\n",
+ " return super(NestedDict,self).__setitem__(keytuple, item)\n",
+ " d = self\n",
+ " for index, key in enumerate(keytuple):\n",
+ " if index != len(keytuple) - 1:\n",
+ " try:\n",
+ " d = d[key]\n",
+ " except:\n",
+ " d[key] = {}\n",
+ " d = d[key]\n",
+ "\n",
+ " else:\n",
+ " d[key] = item"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "d = {}\n",
+ "d = NestedDict(d)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "loc = 'overview.name.metric1'\n",
+ "d[tuple(loc.split('.'))] = \"value\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "{'overview': {'name': {'metric1': 'value'}}}"
+ ]
+ },
+ "execution_count": 15,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "d"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.12"
+ },
+ "orig_nbformat": 4
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/lib/python/tests/__init__.py b/lib/python/test.txt
similarity index 100%
rename from lib/python/tests/__init__.py
rename to lib/python/test.txt
diff --git a/lib/python-beta/tests/conftest.py b/lib/python/tests/conftest.py
similarity index 100%
rename from lib/python-beta/tests/conftest.py
rename to lib/python/tests/conftest.py
diff --git a/lib/python/tests/data/.gitkeep b/lib/python/tests/data/.gitkeep
deleted file mode 100644
index e69de29bb..000000000
diff --git a/lib/python/tests/data/responses.json b/lib/python/tests/data/responses.json
deleted file mode 100644
index 0967ef424..000000000
--- a/lib/python/tests/data/responses.json
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/lib/python/tests/e2e/__init__.py b/lib/python/tests/e2e/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/lib/python/tests/e2e/test_upload_and_update_model.py b/lib/python/tests/e2e/test_upload_and_update_model.py
deleted file mode 100644
index ce9da3970..000000000
--- a/lib/python/tests/e2e/test_upload_and_update_model.py
+++ /dev/null
@@ -1,96 +0,0 @@
-import json
-import os
-
-import pytest
-
-from bailoclient.client import Client
-from bailoclient.config import BailoConfig, CognitoConfig
-
-os.environ["BAILO_URL"] = "http://localhost:8080/api/v1"
-
-
-@pytest.fixture
-def cognito_client():
- # load_dotenv()
-
- ### Configure client based on local secrets
- config = BailoConfig(
- auth=CognitoConfig(
- username=os.getenv("COGNITO_USERNAME"),
- password=os.getenv("COGNITO_PASSWORD"),
- user_pool_id=os.getenv("COGNITO_USERPOOL"),
- client_id=os.getenv("COGNITO_CLIENT_ID"),
- client_secret=os.getenv("COGNITO_CLIENT_SECRET"),
- region=os.getenv("COGNITO_REGION"),
- ),
- bailo_url=os.getenv("BAILO_URL"),
- )
- return Client(config)
-
-
-@pytest.fixture
-def null_client():
- config = BailoConfig(bailo_url=os.getenv("BAILO_URL"), ca_verify=True)
- return Client(config)
-
-
-def test_upload_and_update_model(null_client):
- client = null_client
-
- # Upload model
- with open(
- os.path.join(
- os.path.dirname(__file__),
- "../../bailoclient/resources/minimal_metadata.json",
- )
- ) as json_file:
- metadata = json.load(json_file)
-
- uploaded_model = client.upload_model(
- metadata=metadata,
- binary_file=os.path.join(
- os.path.dirname(__file__), "../../bailoclient/resources/minimal_binary.zip"
- ),
- code_file=os.path.join(
- os.path.dirname(__file__), "../../bailoclient/resources/minimal_code.zip"
- ),
- )
-
- model_uuid = uploaded_model["uuid"]
- model_card = client.get_model_card(model_uuid)
-
- assert uploaded_model.get("uuid")
-
- # Check that current user is model card owner
- user = client.get_me()
-
- assert user.id == model_card.latestVersion.metadata.contacts.uploader[0].id
-
- # Check model schema
- model_schema = client.get_model_schema(model_uuid)
- assert model_schema["reference"] == "/Minimal/General/v10"
-
- # update model card
- new_model_card = model_card.latestVersion.metadata.copy()
- new_model_card.highLevelDetails.name = "Updated Model"
- new_model_card.highLevelDetails.modelCardVersion = "v2.0"
-
- # Update the model
- updated_model = client.update_model(
- new_model_card,
- model_uuid,
- binary_file=os.path.join(
- os.path.dirname(__file__), "../../bailoclient/resources/minimal_binary.zip"
- ),
- code_file=os.path.join(
- os.path.dirname(__file__), "../../bailoclient/resources/minimal_code.zip"
- ),
- )
-
- assert updated_model["uuid"] == model_uuid
-
- # Check a new version has been added
- updated_model_card = client.get_model_card(model_uuid)
-
- assert len(model_card.versions) == 1
- assert len(updated_model_card.versions) == 2
diff --git a/lib/python/tests/mocks/__init__.py b/lib/python/tests/mocks/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/lib/python/tests/mocks/mock_api.py b/lib/python/tests/mocks/mock_api.py
deleted file mode 100644
index 47df5c111..000000000
--- a/lib/python/tests/mocks/mock_api.py
+++ /dev/null
@@ -1,113 +0,0 @@
-from bailoclient.config import BailoConfig
-from lib.python.bailoclient.client.auth import AuthenticationInterface
-from bailoclient.client import APIInterface
-
-from typing import Optional, Dict, Any
-import os
-import json
-
-
-class MockAPI(APIInterface):
- """Mock class to return API respones. It requires a response file formatted like {"VERB":{"/path/subpath":"response"}}.
- VERB should be a standard HTTP verb (e.g. GET, POST, etc.).
- If the response data is the name of a JSON file that file will be loaded as the response instead. All paths are relative to the response file.
- Paths formatted as '/path/{variable}' will be handled at some point.
- Currently we don't handle request parameters or request bodies
- """
-
- def __init__(
- self, config: BailoConfig, auth: AuthenticationInterface, responses: os.PathLike
- ):
- self.config = config
- self.auth = auth
- self.load_responses(responses)
-
- def load_responses(self, response_file: str):
- """Load responses from the passed file"""
- if not os.path.exists(response_file):
- raise FileNotFoundError(f"Response file {response_file} does not exist")
-
- with open(response_file, "r") as f:
- responses_dict = json.load(f)
-
- self.responses = {}
-
- root_dir = os.path.dirname(response_file)
-
- for verb in responses_dict:
- self.responses[verb] = {}
-
- for response_type in responses_dict[verb]:
- data = responses_dict[verb][response_type]
-
- ### Check if we have a response or if it's pointing us at another file.
- if not data.endswith(".json"):
- self.responses[verb][response_type] = data
- else:
- data_path = os.path.join(root_dir, data)
- if not os.path.exists(data_path):
- raise FileNotFoundError(
- f"Response file {data_path} for {response_type} does not exist."
- )
-
- with open(data_path, "r") as fr:
- self.responses[verb][response_type] = json.load(fr)
-
- def _handle_request(
- self, verb: str, request_path: str, *args, **kwargs
- ) -> Dict[str, str]:
- """Internal function to handle auth and reading from the response dictionary"""
-
- if not self.auth.is_authenticated():
- result = self.auth.authenticate_user()
- if not result:
- return None
-
- if request_path in self.responses[verb]:
- return self.responses["GET"][request_path]
-
- raise Exception(f"Response missing for {request_path}")
-
- def get(
- self, request_path: str, request_params: Optional[Dict[str, str]] = None
- ) -> Dict[str, str]:
- """Make a GET request against the mock API requests. This will not do any validation of parameters prior to sending.
-
- request_path: The requested path relative to the API (e.g. /model/summary)
- request_params: Any query parameters to be passed to the API
- return: A JSON object returned by the API. Throws an exception if the request fails
- """
-
- return self._handle_request("GET", request_path, request_params)
-
- def post(
- self,
- request_path: str,
- request_body: str,
- request_params: Optional[Dict[str, str]] = None,
- ) -> Dict[str, str]:
- """Make a POST request against the mock API. This will not do any validation of parameters prior to sending.
-
- request_path: The requested path relative to the API (e.g. /model/summary)
- request_body: The full request body as a string
- request_params: Any query parameters to be passed to the API
- return: A JSON object returned by the API. Throws an exception if the request fails
- """
-
- return self._handle_request("POST", request_path, request_params, request_body)
-
- def put(
- self,
- request_path: str,
- request_body: str,
- request_params: Optional[Dict[str, str]] = None,
- ) -> Dict[str, str]:
- """Make a PUT request against the mock API. This will not do any validation of parameters prior to sending.
-
- request_path: The requested path relative to the API (e.g. /model/summary)
- request_body: The full request body as a string
- request_params: Any query parameters to be passed to the API
- return: A JSON object returned by the API. Throws an exception if the request fails
- """
-
- return self._handle_request("PUT", request_path, request_params, request_body)
diff --git a/lib/python/tests/mocks/mock_auth.py b/lib/python/tests/mocks/mock_auth.py
deleted file mode 100644
index a09fa0d91..000000000
--- a/lib/python/tests/mocks/mock_auth.py
+++ /dev/null
@@ -1,38 +0,0 @@
-from lib.python.bailoclient.client.auth import AuthenticationInterface
-
-from typing import Optional, Dict
-
-
-class MockAuthentication(AuthenticationInterface):
- """Mock authenticator"""
-
- def __init__(self):
- self.can_authenticate = True
- self.is_authed = False
- pass
-
- def set_can_authenticate(self, can_authenticate: bool):
- """Resets authentication state and sets whether any future authentication attempts will succeed"""
- self.can_authenticate = can_authenticate
- self.is_authed = False
-
- def authenticate_user(self) -> bool:
- """Authenticate the user. Returns False if the authentication fails
-
- : return: True if authentication is successful
- """
-
- self.is_authed = self.can_authenticate
- return self.is_authed
-
- def is_authenticated(self) -> bool:
- """Returns True if the user is authenticated"""
- return self.is_authed
-
- def get_authorisation_headers(self) -> Optional[Dict[str, str]]:
- """Authenticate and get the required headers that can be used to send an API request. Return None if the authentication fails
- Note that this interface will definitely change once more auth types are explored.
- """
- if self.is_authed:
- return None
- return {}
diff --git a/lib/python/tests/resources/invalid_metadata.json b/lib/python/tests/resources/invalid_metadata.json
deleted file mode 100644
index 4537b474c..000000000
--- a/lib/python/tests/resources/invalid_metadata.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
- "schemaRef": "/Minimal/General/v10",
- "highLevelDetails": {
- "tags": ["Test"],
- "name": "Minimal Model for Testing",
- "modelInASentence": "This is a sentence",
- "modelOverview": "This is the model overview",
- "modelCardVersion": ""
- },
- "contacts": "me"
-}
diff --git a/lib/python/tests/resources/requirements/__init__.py b/lib/python/tests/resources/requirements/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/lib/python/tests/resources/requirements/file.py b/lib/python/tests/resources/requirements/file.py
deleted file mode 100644
index df3fc1048..000000000
--- a/lib/python/tests/resources/requirements/file.py
+++ /dev/null
@@ -1,2 +0,0 @@
-import pydantic
-import requests
diff --git a/lib/python/tests/resources/responses/responses.json b/lib/python/tests/resources/responses/responses.json
deleted file mode 100644
index d45fe540e..000000000
--- a/lib/python/tests/resources/responses/responses.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "GET": {},
- "POST": {},
- "PUT": {}
-}
diff --git a/lib/python-beta/tests/test_access_request.py b/lib/python/tests/test_access_request.py
similarity index 100%
rename from lib/python-beta/tests/test_access_request.py
rename to lib/python/tests/test_access_request.py
diff --git a/lib/python-beta/tests/test_client.py b/lib/python/tests/test_client.py
similarity index 100%
rename from lib/python-beta/tests/test_client.py
rename to lib/python/tests/test_client.py
diff --git a/lib/python-beta/tests/test_files.py b/lib/python/tests/test_files.py
similarity index 100%
rename from lib/python-beta/tests/test_files.py
rename to lib/python/tests/test_files.py
diff --git a/lib/python-beta/tests/test_model.py b/lib/python/tests/test_model.py
similarity index 100%
rename from lib/python-beta/tests/test_model.py
rename to lib/python/tests/test_model.py
diff --git a/lib/python-beta/tests/test_release.py b/lib/python/tests/test_release.py
similarity index 100%
rename from lib/python-beta/tests/test_release.py
rename to lib/python/tests/test_release.py
diff --git a/lib/python-beta/tests/test_schema.py b/lib/python/tests/test_schema.py
similarity index 100%
rename from lib/python-beta/tests/test_schema.py
rename to lib/python/tests/test_schema.py
diff --git a/lib/python/tests/unit/__init__.py b/lib/python/tests/unit/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/lib/python/tests/unit/client/__init__.py b/lib/python/tests/unit/client/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/lib/python/tests/unit/client/conftest.py b/lib/python/tests/unit/client/conftest.py
deleted file mode 100644
index 113a40812..000000000
--- a/lib/python/tests/unit/client/conftest.py
+++ /dev/null
@@ -1,50 +0,0 @@
-from unittest.mock import Mock
-
-import pytest
-from requests import Response
-
-from bailoclient.client.auth import NullAuthenticator
-
-
-@pytest.fixture
-def mock_response():
- return Mock(spec=Response)
-
-
-@pytest.fixture
-def mock_auth():
- class MockAuth(NullAuthenticator):
- """Mock to test adding auth headers"""
-
- def get_authorisation_headers(self):
- return {"header": "value"}
-
- return MockAuth()
-
-
-@pytest.fixture
-def deployment_1():
- return {
- "metadata": {
- "highLevelDetails": {
- "name": "deployment_name",
- "modelID": "id",
- "initialVersionRequested": "1",
- },
- "timeStamp": "2022-09-29T14:08:37.528Z",
- }
- }
-
-
-@pytest.fixture
-def deployment_2():
- return {
- "metadata": {
- "highLevelDetails": {
- "name": "deployment_name",
- "modelID": "id",
- "initialVersionRequested": "2",
- },
- "timeStamp": "2022-09-30T14:08:37.528Z",
- }
- }
diff --git a/lib/python/tests/unit/client/test_auth.py b/lib/python/tests/unit/client/test_auth.py
deleted file mode 100644
index ad58c0005..000000000
--- a/lib/python/tests/unit/client/test_auth.py
+++ /dev/null
@@ -1,59 +0,0 @@
-from unittest.mock import MagicMock
-
-import pytest
-import os
-
-from bailoclient.client.auth import CognitoSRPAuthenticator
-from bailoclient.config import BailoConfig, CognitoConfig
-from bailoclient.exceptions import UnauthorizedException
-
-
-@pytest.fixture()
-def cognito_authenticator(bailo_url):
- cognito_config = CognitoConfig(
- username="COGNITO_USERNAME",
- password="COGNITO_PASSWORD",
- user_pool_id="COGNITO_USERPOOL",
- client_id="COGNITO_CLIENT_ID",
- client_secret="COGNITO_CLIENT_SECRET",
- region="COGNITO_REGION",
- )
-
- config = BailoConfig(
- auth=cognito_config,
- bailo_url=bailo_url,
- ca_verify=True,
- )
-
- authenticator = CognitoSRPAuthenticator(config.auth)
- aws_auth_response = {"AuthenticationResult": {"AccessToken": "token"}}
- authenticator._CognitoSRPAuthenticator__try_authorise = MagicMock(
- return_value=aws_auth_response
- )
-
- return authenticator
-
-
-def test_authenticate_user(cognito_authenticator):
- assert not cognito_authenticator.is_authenticated()
-
-
-def test_authenticate_user_true_after_authentication(cognito_authenticator):
- cognito_authenticator.authenticate_user("COGNITO_USERNAME", "COGNITO_PASSWORD")
-
- assert cognito_authenticator.is_authenticated()
-
-
-def test_auth_headers_raises_error_if_not_authenticated(cognito_authenticator):
- with pytest.raises(
- UnauthorizedException, match="Authenticator not yet authorised."
- ):
- cognito_authenticator.get_authorisation_headers()
-
-
-def test_auth_headers(cognito_authenticator):
- cognito_authenticator.authenticate_user("COGNITO_USERNAME", "COGNITO_PASSWORD")
-
- headers = cognito_authenticator.get_authorisation_headers()
-
- assert headers.get("Authorization")
diff --git a/lib/python/tests/unit/client/test_client.py b/lib/python/tests/unit/client/test_client.py
deleted file mode 100644
index 754d2e14c..000000000
--- a/lib/python/tests/unit/client/test_client.py
+++ /dev/null
@@ -1,346 +0,0 @@
-import json
-import os
-import re
-from unittest.mock import Mock, patch
-from pkg_resources import resource_filename
-
-import pytest
-from bailoclient.models import User
-from bailoclient.exceptions import (
- CannotIncrementVersion,
- DeploymentNotFound,
- InvalidFileRequested,
- UserNotFound,
-)
-
-
-@patch("bailoclient.client.Client.get_model_schema", autospec=True)
-def test_get_model_schema(patch_get_model_schema, null_client):
- patch_get_model_schema.return_value = {"response": "this is a thing"}
-
- model_uuid = "xyz"
-
- resp = null_client.get_model_schema(model_uuid)
-
- assert resp == {"response": "this is a thing"}
-
-
-@patch("bailoclient.client.Client.get_users")
-def test_get_user_by_name_returns_user_object_with_matching_name(
- patch_get_users, null_client
-):
- patch_get_users.return_value = [User({"id": "user"})]
-
- user = null_client.get_user_by_name("user")
-
- assert user.id == "user"
-
-
-@patch("bailoclient.client.Client.get_users")
-def test_get_user_by_name_raise_exception_if_no_matching_users(
- patch_get_users, null_client
-):
- patch_get_users.return_value = [User({"id": "user"})]
-
- with pytest.raises(UserNotFound):
- null_client.get_user_by_name("test")
-
-
-@patch("bailoclient.client.Client._Client__model")
-def test_get_model_card_gets_version_if_version_provided(patch_model, null_client):
- model_uuid = "id"
- model_version = "version"
-
- null_client.api.get = Mock()
- null_client.get_model_card(model_uuid=model_uuid, model_version=model_version)
-
- null_client.api.get.assert_called_once_with(
- f"model/{model_uuid}/version/{model_version}"
- )
-
-
-@patch("bailoclient.client.Client._Client__model")
-def test_get_model_card_gets_model_if_no_version_provided(patch_model, null_client):
- model_uuid = "id"
-
- null_client.api.get = Mock()
- null_client.get_model_card(model_uuid=model_uuid)
-
- null_client.api.get.assert_called_once_with(f"model/uuid/{model_uuid}")
-
-
-def test_post_model_raises_error_if_invalid_mode_given(null_client):
- with pytest.raises(
- ValueError,
- match=re.escape("Invalid mode - must be either newVersion or newModel"),
- ):
- null_client._post_model(
- model_data="",
- mode="invalid",
- )
-
-
-def test_increment_version_increases_version_by_one(null_client):
- null_client.api.get = Mock(return_value=[{"version": "1"}, {"version": "2"}])
-
- version = null_client._increment_model_version("model_uuid")
-
- assert version == "3"
-
-
-def test_increment_model_version_raises_error_if_unable_to_increase_version_by_one(
- null_client,
-):
- null_client.api.get = Mock(return_value=[{"version": "a"}, {"version": "b"}])
-
- with pytest.raises(
- CannotIncrementVersion,
- match="Please manually provide an updated version number",
- ):
- null_client._increment_model_version("model_uuid")
-
-
-@patch("bailoclient.client.client.generate_payload")
-@patch("bailoclient.client.client.validate_uploads")
-@patch("bailoclient.client.client.too_large_for_gateway", return_value=False)
-@patch("bailoclient.client.Client._post_model")
-def test_update_model_is_called_with_expected_params(
- patch_post_model,
- patch_gateway,
- patch_validate_uploads,
- patch_generate_payload,
- null_client,
-):
- mode = "newVersion"
- model_uuid = "model_abc"
- binary_file = "../../frontend/cypress/fixtures/minimal_binary.zip"
- code_file = "../../frontend/cypress/fixtures/minimal_code.zip"
- metadata = {"highLevelDetails": {"modelCardVersion": "2"}}
- metadata_json = json.dumps(metadata)
-
- payload = Mock(metadata_json, content_type="content")
- patch_generate_payload.return_value = payload
-
- null_client.update_model(
- metadata=metadata,
- model_uuid=model_uuid,
- binary_file=binary_file,
- code_file=code_file,
- )
-
- patch_validate_uploads.assert_called_once_with(
- binary_file=binary_file,
- code_file=code_file,
- metadata=metadata,
- minimal_metadata_path=resource_filename(
- "bailoclient", "resources/minimal_metadata.json"
- ),
- )
-
- patch_generate_payload.assert_called_once_with(
- metadata_json, binary_file, code_file
- )
-
- patch_post_model.assert_called_once_with(
- model_data=payload, mode=mode, model_uuid=model_uuid
- )
-
-
-@patch("bailoclient.client.client.generate_payload")
-@patch("bailoclient.client.client.validate_uploads")
-@patch("bailoclient.client.client.too_large_for_gateway", return_value=True)
-def test_update_model_raises_exception_if_model_files_too_large(
- patch_too_large_for_gateway,
- patch_validate_uploads,
- patch_generate_payload,
- null_client,
-):
- with pytest.raises(ValueError):
- model_uuid = "model_abc"
- binary_file = "../../frontend/cypress/fixtures/minimal_binary.zip"
- code_file = "../../frontend/cypress/fixtures/minimal_code.zip"
- metadata = {"highLevelDetails": {"modelCardVersion": "2"}}
-
- null_client.update_model(
- metadata=metadata,
- model_uuid=model_uuid,
- binary_file=binary_file,
- code_file=code_file,
- )
-
-
-@patch("bailoclient.client.client.generate_payload")
-@patch("bailoclient.client.client.validate_uploads")
-@patch("bailoclient.client.client.too_large_for_gateway", return_value=False)
-@patch("bailoclient.client.Client._post_model")
-def test_upload_model_is_called_with_expected_params(
- patch_post_model,
- patch_too_large_for_gateway,
- patch_validate_uploads,
- patch_generate_payload,
- null_client,
-):
- binary_file = "../../frontend/cypress/fixtures/minimal_binary.zip"
- code_file = "../../frontend/cypress/fixtures/minimal_code.zip"
- metadata = {"highLevelDetails": {"modelCardVersion": "2"}}
- metadata_json = json.dumps(metadata)
-
- payload = Mock({"payload": "data"}, content_type="content")
-
- patch_generate_payload.return_value = payload
-
- null_client.upload_model(
- metadata=metadata,
- binary_file=binary_file,
- code_file=code_file,
- )
-
- patch_validate_uploads.assert_called_once_with(
- binary_file=binary_file,
- code_file=code_file,
- metadata=metadata,
- minimal_metadata_path=resource_filename(
- "bailoclient", "resources/minimal_metadata.json"
- ),
- )
-
- patch_generate_payload.assert_called_once_with(
- metadata_json, binary_file, code_file
- )
-
- patch_too_large_for_gateway.assert_called_once_with(payload, True)
-
- patch_post_model.assert_called_once_with(payload)
-
-
-@patch("bailoclient.client.client.generate_payload")
-@patch("bailoclient.client.client.validate_uploads")
-@patch("bailoclient.client.client.too_large_for_gateway", return_value=True)
-def test_upload_model_raises_exception_if_model_files_too_large(
- patch_too_large_for_gateway,
- patch_validate_uploads,
- patch_generate_payload,
- null_client,
-):
- with pytest.raises(ValueError):
- model_uuid = "model_abc"
- binary_file = "../../frontend/cypress/fixtures/minimal_binary.zip"
- code_file = "../../frontend/cypress/fixtures/minimal_code.zip"
- metadata = {"highLevelDetails": {"modelCardVersion": "2"}}
-
- null_client.upload_model(
- metadata=metadata,
- binary_file=binary_file,
- code_file=code_file,
- )
-
-
-def test_download_model_files_raises_error_if_file_type_is_not_code_or_binary(
- null_client,
-):
- with pytest.raises(InvalidFileRequested):
- null_client.download_model_files(
- deployment_uuid="test", model_version="1", file_type="invalid"
- )
-
-
-def test_download_model_files_raises_error_if_output_dir_already_exists_and_user_has_not_specified_overwrite(
- null_client, tmpdir
-):
- with pytest.raises(FileExistsError):
- null_client.download_model_files(
- deployment_uuid="test", model_version="1", output_dir=str(tmpdir)
- )
-
-
-def test_download_model_files_overwrites_existing_output_dir_if_user_has_specified_overwrite(
- null_client, tmpdir
-):
- deployment_uuid = "test"
- model_version = "1"
- file_type = "binary"
-
- null_client.api.get = Mock(return_value=200)
-
- null_client.download_model_files(
- deployment_uuid=deployment_uuid,
- model_version=model_version,
- file_type=file_type,
- output_dir=str(tmpdir),
- overwrite=True,
- )
-
- null_client.api.get.assert_called_once_with(
- f"/deployment/{deployment_uuid}/version/{model_version}/raw/{file_type}",
- output_dir=str(tmpdir),
- )
-
-
-@pytest.mark.parametrize(
- "file_type, expected_call_count",
- [("binary", 1), ("code", 1), (None, 2)],
-)
-def test_download_model_files_does_expected_api_calls(
- file_type, expected_call_count, null_client
-):
- null_client.api.get = Mock(return_value=200)
-
- null_client.download_model_files(
- deployment_uuid="test",
- model_version="1",
- file_type=file_type,
- output_dir="dir",
- overwrite=True,
- )
-
- assert null_client.api.get.call_count == expected_call_count
-
-
-@patch("bailoclient.client.Client.get_me", return_value=User(_id="user"))
-@patch(
- "bailoclient.client.Client.get_user_deployments",
- return_value={"deployment_id": "deployment"},
-)
-def test_get_my_deployments_gets_deployments_for_current_user(
- patch_get_user_deployments, patch_get_me, null_client
-):
- null_client.get_my_deployments()
-
- patch_get_user_deployments.assert_called_once()
- patch_get_me.assert_called_once()
-
-
-@patch("bailoclient.client.Client.get_my_deployments", return_value=[])
-def test_find_my_deployment_raises_error_if_no_user_deployments_found(
- patch_get_my_deployments, null_client
-):
- with pytest.raises(DeploymentNotFound):
- null_client.find_my_deployment(deployment_name="deployment", model_uuid="model")
-
-
-@patch("bailoclient.client.Client.get_my_deployments")
-def test_find_my_deployment_raises_error_if_no_deployments_match(
- patch_get_my_deployments, null_client, deployment_1
-):
- patch_get_my_deployments.return_value = [deployment_1]
-
- with pytest.raises(DeploymentNotFound):
- null_client.find_my_deployment(
- deployment_name="deployment_name", model_uuid="incorrect_id"
- )
-
-
-@patch("bailoclient.client.Client.get_my_deployments")
-def test_find_my_deployment_finds_latest_version_if_multiple_matching_deployments_found(
- patch_get_my_deployments, null_client, deployment_1, deployment_2
-):
- older_deployment = deployment_1
- newer_deployment = deployment_2
-
- patch_get_my_deployments.return_value = [older_deployment, newer_deployment]
-
- my_deployment = null_client.find_my_deployment(
- deployment_name="deployment_name", model_uuid="id"
- )
-
- assert my_deployment == newer_deployment
diff --git a/lib/python/tests/unit/client/test_http.py b/lib/python/tests/unit/client/test_http.py
deleted file mode 100644
index 0f69de3fd..000000000
--- a/lib/python/tests/unit/client/test_http.py
+++ /dev/null
@@ -1,208 +0,0 @@
-import os
-from unittest.mock import Mock, patch
-
-import pytest
-import requests
-import requests_pkcs12
-
-from bailoclient.client.http import RequestsAdapter
-from bailoclient.client.auth import NullAuthenticator
-
-
-MINIMAL_MODEL_PATH = os.getenv("MINIMAL_MODEL_PATH")
-
-
-@pytest.fixture
-def response_mock():
- return Mock(spec=requests.Response)
-
-
-@pytest.fixture
-def requests_mock():
- return Mock(spec=requests)
-
-
-@pytest.fixture
-def mock_auth_headers():
- return {"header": "value"}
-
-
-@pytest.fixture
-def auth_mock(mock_auth_headers):
- class MockAuth(NullAuthenticator):
- """Mock to test adding auth headers"""
-
- def get_authorisation_headers(self):
- return mock_auth_headers
-
- return MockAuth()
-
-
-@pytest.fixture
-def api_mock(null_bailo_config, auth_mock):
- api = RequestsAdapter(null_bailo_config)
- api._requests_module = Mock(spec=requests)
- api._auth = auth_mock
- return api
-
-
-@pytest.fixture
-def pki_api_mock(pki_bailo_config, auth_mock):
- api = RequestsAdapter(pki_bailo_config)
- api._requests_module = Mock(spec=requests_pkcs12)
- api._auth = auth_mock
- return api
-
-
-@patch("bailoclient.client.http.RequestsAdapter._connect")
-def test_cognito_uses_requests_module(patch_connect, cognito_bailo_config):
- api = RequestsAdapter(cognito_bailo_config)
- assert api._requests_module == requests
-
-
-def test_pki_uses_requests_pkcs12_module(pki_bailo_config):
- api = RequestsAdapter(pki_bailo_config)
- assert api._requests_module == requests_pkcs12
-
-
-def test_null_uses_requests_module(null_bailo_config):
- api = RequestsAdapter(null_bailo_config)
- assert api._requests_module == requests
-
-
-def test_get_request_with_response(
- api_mock, response_mock, mock_auth_headers, null_bailo_config
-):
- response_mock.json.return_value = {"response": "success"}
- response_mock.status_code = 200
- api_mock._requests_module.get.return_value = response_mock
-
- api_mock.get("/test/url")
- api_mock._requests_module.get.assert_called_once_with(
- f"{null_bailo_config.bailo_url}/test/url",
- headers=mock_auth_headers,
- params=None,
- timeout=null_bailo_config.timeout_period,
- verify=null_bailo_config.ca_verify,
- )
-
-
-def test_get_pki_request_with_response(
- pki_api_mock, response_mock, mock_auth_headers, pki_bailo_config
-):
- response_mock.json.return_value = {"response": "success"}
- response_mock.status_code = 201
- pki_api_mock._requests_module.get.return_value = response_mock
-
- pki_api_mock.get("/test/url")
- pki_api_mock._requests_module.get.assert_called_once_with(
- f"{pki_bailo_config.bailo_url}/test/url",
- pkcs12_filename=pki_bailo_config.auth.pkcs12_filename,
- pkcs12_password=pki_bailo_config.auth.pkcs12_password,
- headers=mock_auth_headers,
- params=None,
- timeout=pki_bailo_config.timeout_period,
- verify=pki_bailo_config.ca_verify,
- )
-
-
-def test_get_downloads_file_to_output_dir(
- temp_dir, api_mock, null_bailo_config, response_mock, mock_auth_headers
-):
- with open(f"{MINIMAL_MODEL_PATH}/minimal_binary.zip", "rb") as zipfile:
- content = zipfile.read()
-
- response_mock.json.return_value = {"response": "success"}
- response_mock.status_code = 200
- response_mock.content = content
- api_mock._requests_module.get.return_value = response_mock
-
- api_mock.get("/test/url", output_dir=temp_dir)
- api_mock._requests_module.get.assert_called_once_with(
- f"{null_bailo_config.bailo_url}/test/url",
- headers=mock_auth_headers,
- params=None,
- timeout=null_bailo_config.timeout_period,
- verify=null_bailo_config.ca_verify,
- )
- assert os.listdir(temp_dir) == ["model.bin"]
-
-
-def test_post_request_with_response(
- api_mock, response_mock, mock_auth_headers, pki_bailo_config
-):
- response_mock.json.return_value = {"response": "success"}
- response_mock.status_code = 201
- api_mock._requests_module.post.return_value = response_mock
-
- request_body = {"data": "value"}
- api_mock.post("/test/url", request_body=request_body)
- api_mock._requests_module.post.assert_called_once_with(
- f"{pki_bailo_config.bailo_url}/test/url",
- data=request_body,
- headers=mock_auth_headers,
- params=None,
- timeout=pki_bailo_config.timeout_period,
- verify=pki_bailo_config.ca_verify,
- )
-
-
-def test_put_request_with_response(
- api_mock, response_mock, mock_auth_headers, pki_bailo_config
-):
- response_mock.json.return_value = {"response": "success"}
- response_mock.status_code = 201
- api_mock._requests_module.put.return_value = response_mock
-
- request_body = {"data": "value"}
- api_mock.put("/test/url", request_body=request_body)
- api_mock._requests_module.put.assert_called_once_with(
- f"{pki_bailo_config.bailo_url}/test/url",
- data=request_body,
- headers=mock_auth_headers,
- params=None,
- timeout=pki_bailo_config.timeout_period,
- verify=pki_bailo_config.ca_verify,
- )
-
-
-def test_post_pki_request_with_response(
- pki_api_mock, response_mock, mock_auth_headers, pki_bailo_config
-):
- response_mock.json.return_value = {"response": "success"}
- response_mock.status_code = 201
- pki_api_mock._requests_module.post.return_value = response_mock
-
- request_body = {"data": "value"}
- pki_api_mock.post("/test/url", request_body=request_body)
- pki_api_mock._requests_module.post.assert_called_once_with(
- f"{pki_bailo_config.bailo_url}/test/url",
- pkcs12_filename=pki_bailo_config.auth.pkcs12_filename,
- pkcs12_password=pki_bailo_config.auth.pkcs12_password,
- data=request_body,
- headers=mock_auth_headers,
- params=None,
- timeout=pki_bailo_config.timeout_period,
- verify=pki_bailo_config.ca_verify,
- )
-
-
-def test_put_pki_request_with_response(
- pki_api_mock, response_mock, mock_auth_headers, pki_bailo_config
-):
- response_mock.json.return_value = {"response": "success"}
- response_mock.status_code = 201
- pki_api_mock._requests_module.put.return_value = response_mock
-
- request_body = {"data": "value"}
- pki_api_mock.put("/test/url", request_body=request_body)
- pki_api_mock._requests_module.put.assert_called_once_with(
- f"{pki_bailo_config.bailo_url}/test/url",
- pkcs12_filename=pki_bailo_config.auth.pkcs12_filename,
- pkcs12_password=pki_bailo_config.auth.pkcs12_password,
- data=request_body,
- headers=mock_auth_headers,
- params=None,
- timeout=pki_bailo_config.timeout_period,
- verify=pki_bailo_config.ca_verify,
- )
diff --git a/lib/python/tests/unit/client/test_utils.py b/lib/python/tests/unit/client/test_utils.py
deleted file mode 100644
index 51ae9506a..000000000
--- a/lib/python/tests/unit/client/test_utils.py
+++ /dev/null
@@ -1,117 +0,0 @@
-import os
-import re
-from json import JSONDecodeError
-
-import pytest
-
-from bailoclient.client.utils import (
- form_url,
- get_headers,
- handle_response,
- _add_files_to_payload,
-)
-from bailoclient.exceptions import (
- NoServerResponseMessage,
- UnauthorizedException,
-)
-
-from bailoclient.client.utils import get_file_name, get_mime_type
-
-MINIMAL_MODEL_PATH = os.getenv("MINIMAL_MODEL_PATH")
-
-
-def test_form_url_prepends_slash(bailo_url):
- path = "this/is/a/url"
- assert form_url(bailo_url, path) == f"{bailo_url}/{path}"
-
-
-def test_form_url_does_not_prepend_slash(bailo_url):
- path = "/this/is/a/url"
- assert form_url(bailo_url, path) == f"{bailo_url}{path}"
-
-
-def test_get_headers_returns_merged_headers_if_input_headers_provided(mock_auth):
- headers = get_headers(mock_auth, {"new_header": "value2"})
- assert headers == {"header": "value", "new_header": "value2"}
-
-
-def test_get_headers_returns_auth_headers(mock_auth):
- assert get_headers(mock_auth) == {"header": "value"}
-
-
-def test_get_returns_response_json_if_successful(mock_response):
- mock_response.json.return_value = {"response": "success"}
- mock_response.status_code = 201
- assert handle_response(mock_response) == {"response": "success"}
-
-
-def test_handle_response_raises_unauthorised_exception_if_401_error_and_response_json(
- mock_response,
-):
- mock_response.json.return_value = {"response": "failure"}
- mock_response.status_code = 401
- with pytest.raises(UnauthorizedException):
- handle_response(mock_response)
-
-
-def test_handle_response_raises_for_status_if_no_response_json(mock_response):
- mock_response.json.side_effect = JSONDecodeError("msg", "doc", 1)
- mock_response.raise_for_status.side_effect = NoServerResponseMessage(
- f"Server returned 401"
- )
- mock_response.status_code = 401
- with pytest.raises(NoServerResponseMessage, match=re.escape("Server returned 401")):
- handle_response(mock_response)
-
-
-def test_handle_response_calls_decode_file_content_if_an_output_dir_is_provided(
- mock_response, temp_dir
-):
- with open(f"{MINIMAL_MODEL_PATH}/minimal_binary.zip", "rb") as zipfile:
- content = zipfile.read()
-
- mock_response.json.return_value = {"response": "success"}
- mock_response.status_code = 200
- mock_response.content = content
-
- handle_response(mock_response, output_dir=temp_dir)
- assert os.listdir(temp_dir) == ["model.bin"]
-
-
-def test_get_file_name_of_file_correctly():
- assert "responses.json" == get_file_name("tests/data/responses.json")
-
-
-def test_get_file_name_of_dir_correctly():
- assert "data" == get_file_name("data")
-
-
-def test_get_mime_type_json():
- assert "application/json" == get_mime_type("tests/data/responses.json")
-
-
-def test_get_mime_type_json_file_does_not_exist():
- assert "application/json" == get_mime_type("path/does/not/exist/responses.json")
-
-
-def test_get_mime_type_dir_is_none():
- assert get_mime_type("tests/data") is None
-
-
-def test_add_files_to_payload_adds_code_and_binary_files():
- payloads = []
- _add_files_to_payload(
- payloads=payloads,
- binary_file="../../frontend/cypress/fixtures/minimal_binary.zip",
- code_file="../../frontend/cypress/fixtures/minimal_code.zip",
- )
-
- assert len(payloads) == 2
- assert "code" in payloads[0]
- assert "binary" in payloads[1]
- assert "minimal_code.zip" in payloads[0][1]
- assert "minimal_binary.zip" in payloads[1][1]
-
-
-def test_generate_payload():
- pass
diff --git a/lib/python/tests/unit/client/test_validation.py b/lib/python/tests/unit/client/test_validation.py
deleted file mode 100644
index 3d2957f8d..000000000
--- a/lib/python/tests/unit/client/test_validation.py
+++ /dev/null
@@ -1,180 +0,0 @@
-import os
-import re
-import tempfile
-from unittest.mock import patch
-
-import pytest
-
-from bailoclient.client.validation import (
- minimal_keys_in_dictionary,
- validate_model_card,
- validate_metadata,
- validate_file_paths,
- deployment_matches,
- too_large_for_gateway,
-)
-from bailoclient.exceptions import DataInvalid, InvalidMetadata, InvalidFilePath
-from bailoclient.models import Model
-from bailoclient.models.model import ValidationError, ValidationResult
-
-
-MINIMAL_MODEL_PATH = os.getenv("MINIMAL_MODEL_PATH")
-
-
-def test_minimal_keys_in_dict_returns_valid_result_if_both_dictionaries_are_empty():
- result = minimal_keys_in_dictionary({}, {})
-
- assert result == {"valid": True}
-
-
-def test_minimal_keys_in_dict_returns_error_if_dict2_does_not_include_key_from_minimal_dict():
- result = minimal_keys_in_dictionary({"key1": "value1"}, {})
-
- assert not result["valid"]
- assert result["error_message"] == "must contain 'key1'"
-
-
-def test_minimal_keys_in_dict_returns_error_if_dict2_has_empty_value_for_key_from_minimal_dict():
- result = minimal_keys_in_dictionary({"key1": "value1"}, {"key1": None})
-
- assert not result["valid"]
- assert result["error_message"] == "'key1' cannot be empty"
-
-
-def test_minimal_keys_in_dict_returns_error_if_dict2_is_missing_subkeys_from_minimal_dict():
- result = minimal_keys_in_dictionary(
- {"key1": {"key2": "value1"}}, {"key1": "value1"}
- )
-
- assert not result["valid"]
- assert result["error_message"] == "missing data under 'key1'"
-
-
-def test_minimal_keys_in_dict_validates_if_minimal_dict_is_empty():
- result = minimal_keys_in_dictionary({}, {"key1": "value1"})
-
- assert result["valid"]
-
-
-def test_minimal_keys_in_dict_ignores_extra_keys_in_dict2():
- result = minimal_keys_in_dictionary(
- {"key1": "value"}, {"key1": "value1", "key2": "value2"}
- )
-
- assert result["valid"]
-
-
-def test_minimal_keys_in_dict_validates_multilevel_dictionaries():
- result = minimal_keys_in_dictionary(
- {"key1": {"key2": {"key3": "value"}}}, {"key1": {"key2": {"key3": "value"}}}
- )
-
- assert result["valid"]
-
-
-def test_minimal_keys_in_dict_allows_false_as_valid_value():
- result = minimal_keys_in_dictionary(
- {"key1": {"key2": {"key3": "value"}}}, {"key1": {"key2": {"key3": False}}}
- )
-
- assert result["valid"]
-
-
-def test_minimal_keys_in_dict_does_not_allow_empty_string_as_valid_value():
- result = minimal_keys_in_dictionary(
- {"key1": {"key2": {"key3": "value"}}}, {"key1": {"key2": {"key3": ""}}}
- )
-
- assert not result["valid"]
-
-
-@patch("bailoclient.client.client.Model.validate")
-def test_validate_model_card_raises_error_if_model_card_is_invalid(
- patch_validate, bailo_url
-):
- validation_errors = [ValidationError("field", "message")]
- patch_validate.return_value = ValidationResult(validation_errors)
-
- model_card = Model(_schema={"key": "value"})
-
- with pytest.raises(
- DataInvalid,
- match=re.escape(f"Model invalid: {validation_errors}"),
- ):
- validate_model_card(model_card=model_card)
-
-
-@patch(
- "bailoclient.client.validation.minimal_keys_in_dictionary",
- return_value={"valid": False, "error_message": "error"},
-)
-def test_validate_metadata_raises_error_if_metadata_is_invalid(patch_validate_metadata):
- metadata = {"schema": "value"}
-
- with pytest.raises(
- InvalidMetadata,
- match=re.escape("Metadata error - refer to minimal_metadata"),
- ):
- validate_metadata(
- metadata=metadata,
- minimal_metadata_path="../../frontend/cypress/fixtures/minimal_metadata.json",
- )
-
-
-def test_validate_filepaths_raises_error_if_filepath_does_not_exist():
- with pytest.raises(
- InvalidFilePath, match=re.escape("this/path/does/not/exist does not exist")
- ):
- validate_file_paths(
- "this/path/does/not/exist",
- )
-
-
-def test_validate_filepaths_raises_error_if_a_directory_is_uploaded():
- with pytest.raises(
- InvalidFilePath,
- match=re.escape("../../frontend/cypress/fixtures is a directory"),
- ):
- validate_file_paths(
- "../../frontend/cypress/fixtures",
- )
-
-
-def test_validate_file_paths(temp_dir):
- with tempfile.TemporaryFile() as file:
- validate_file_paths(*[file.name, file.name])
-
-
-def test_too_large_for_gateway_fail():
- class Data:
- @property
- def len(self):
- return 100_000_000
-
- assert too_large_for_gateway(Data(), True) is True
-
-
-def test_deployment_matches_returns_false_if_deployment_does_not_match_criteria(
- deployment_1,
-):
- assert not deployment_matches(
- deployment_1,
- deployment_name="incorrect_name",
- model_uuid="id",
- model_version="1",
- )
-
-
-def test_deployment_matches_ignores_version_if_not_provided(deployment_1, deployment_2):
- assert deployment_matches(
- deployment_1,
- deployment_name="deployment_name",
- model_uuid="id",
- model_version=None,
- )
- assert deployment_matches(
- deployment_1,
- deployment_name="deployment_name",
- model_uuid="id",
- model_version=None,
- )
diff --git a/lib/python/tests/unit/conftest.py b/lib/python/tests/unit/conftest.py
deleted file mode 100644
index bef5f14ca..000000000
--- a/lib/python/tests/unit/conftest.py
+++ /dev/null
@@ -1,76 +0,0 @@
-import os
-import tempfile
-import pytest
-
-from bailoclient import Bailo
-from bailoclient.client import Client
-from bailoclient.config import BailoConfig, Pkcs12Config, CognitoConfig
-
-
-os.environ["MINIMAL_MODEL_PATH"] = "../../frontend/cypress/fixtures"
-
-
-@pytest.fixture
-def bailo_url() -> str:
- url = os.getenv("BAILO_URL")
- print(url)
- if url:
- return url
- return "http://bailo.com/test/api"
-
-
-@pytest.fixture
-def temp_dir() -> str:
- with tempfile.TemporaryDirectory() as temp_dir_name:
- yield temp_dir_name
-
-
-@pytest.fixture
-def timeout_period() -> int:
- return 10
-
-
-@pytest.fixture
-def pki_bailo_config(bailo_url, timeout_period) -> BailoConfig:
- return BailoConfig(
- auth=Pkcs12Config(
- pkcs12_filename="test-cert.crt", pkcs12_password="test-password"
- ),
- bailo_url=bailo_url,
- timeout_period=timeout_period,
- )
-
-
-@pytest.fixture
-def cognito_bailo_config(bailo_url, timeout_period) -> BailoConfig:
- return BailoConfig(
- auth=CognitoConfig(
- username="username",
- password="password",
- user_pool_id="user-pool-id",
- client_id="client-id",
- client_secret="client-secret",
- region="region",
- ),
- bailo_url=bailo_url,
- timeout_period=timeout_period,
- )
-
-
-@pytest.fixture
-def null_bailo_config(bailo_url, timeout_period) -> BailoConfig:
- return BailoConfig(
- auth=None,
- bailo_url=bailo_url,
- timeout_period=timeout_period,
- )
-
-
-@pytest.fixture
-def null_client(null_bailo_config) -> Client:
- return Client(null_bailo_config)
-
-
-@pytest.fixture
-def null_bailo(null_bailo_config) -> Bailo:
- return Bailo(null_bailo_config)
diff --git a/lib/python/tests/unit/model_handlers/__init__.py b/lib/python/tests/unit/model_handlers/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/lib/python/tests/unit/model_handlers/test_model_bundler.py b/lib/python/tests/unit/model_handlers/test_model_bundler.py
deleted file mode 100644
index bdb3f4282..000000000
--- a/lib/python/tests/unit/model_handlers/test_model_bundler.py
+++ /dev/null
@@ -1,611 +0,0 @@
-from unittest.mock import patch, Mock, call
-import os
-import pytest
-import subprocess
-
-from bailoclient.model_handlers.model_bundler import Bundler
-from bailoclient.enums import ModelFlavoursMeta
-from bailoclient.exceptions import (
- ModelFlavourNotFound,
- MissingFilesError,
- ModelTemplateNotAvailable,
- ModelMethodNotAvailable,
-)
-
-
-@pytest.fixture
-def bundler():
- return Bundler()
-
-
-@patch("bailoclient.model_handlers.model_bundler.Bundler._bundle_model_files")
-def test_bundle_model_raises_type_error_if_additional_files_is_not_tuple_or_list(
- patch_bundle_files, bundler
-):
- with pytest.raises(TypeError):
- bundler.bundle_model(output_path=".", additional_files="str")
-
- bundler.bundle_model(output_path=".", additional_files=("file/path",))
- bundler.bundle_model(output_path=".", additional_files=["file/path"])
-
-
-@patch("bailoclient.model_handlers.model_bundler.Bundler._bundle_model_files")
-def test_bundle_model_converts_flavour_to_lower_case(patch_bundle_files, bundler):
- bundler.bundle_model(output_path=".", model_flavour="FLAVOUR")
-
- patch_bundle_files.assert_called_once_with(
- output_path=".",
- model_binary=None,
- model_py=None,
- model_requirements=None,
- requirements_files_path=None,
- model_flavour="flavour",
- additional_files=None,
- )
-
-
-@patch("bailoclient.model_handlers.model_bundler.Bundler._save_and_bundle_model_files")
-def test_bundle_model_does_mlflow_bundling_if_actual_model_file_provided(
- patch_mlflow_bundling,
- bundler,
-):
- model, model_flavour = "model", "invalid_flavour"
- output_path = "."
-
- ModelFlavoursMeta.__contains__ = Mock(return_value=False)
-
- bundler.bundle_model(
- output_path=output_path, model=model, model_flavour=model_flavour
- )
-
- patch_mlflow_bundling.assert_called_once()
-
-
-@patch("bailoclient.model_handlers.model_bundler.Bundler._bundle_model_files")
-def test_bundle_model_bundles_files_if_no_actual_model_file_provided(
- patch_bundle_model_files,
- bundler,
-):
- model, model_flavour = "model", "invalid_flavour"
- output_path = "."
-
- ModelFlavoursMeta.__contains__ = Mock(return_value=False)
-
- bundler.bundle_model(output_path=output_path)
-
- patch_bundle_model_files.assert_called_once()
-
-
-@pytest.mark.parametrize(
- "binary_path, requirements_path",
- [
- (None, "requirements"),
- ("binary", None),
- ],
-)
-def test_bundle_model_files_raises_missing_file_error_if_binary_or_requirements_path_missing(
- binary_path, requirements_path, bundler
-):
- with pytest.raises(MissingFilesError):
- bundler._bundle_model_files(
- output_path=".",
- model_binary=binary_path,
- model_py=None,
- model_requirements=requirements_path,
- requirements_files_path=None,
- model_flavour=None,
- additional_files=None,
- )
-
-
-def test_bundle_model_files_raises_exception_if_no_model_py_provided_and_invalid_or_missing_flavour(
- bundler,
-):
- model_binary, model_requirements, model_flavour = (
- "binary/path",
- "requirements/path",
- "invalid_flavour",
- )
- output_path = "."
-
- ModelFlavoursMeta.__contains__ = Mock(return_value=False)
-
- with pytest.raises(ModelFlavourNotFound):
- bundler._bundle_model_files(
- output_path=output_path,
- model_binary=model_binary,
- model_py=None,
- model_requirements=model_requirements,
- requirements_files_path=None,
- model_flavour=model_flavour,
- additional_files=None,
- )
-
-
-@patch(
- "bailoclient.model_handlers.model_bundler.Bundler._transfer_and_bundle_model_files"
-)
-def test_bundle_model_files_identifies_model_template_if_no_model_py(
- patch_transfer_and_bundle_model_files, bundler
-):
- model_binary, model_requirements, model_flavour = (
- "binary/path",
- "requirements/path",
- "flavour",
- )
- output_path = "."
-
- mock_template = "returned template"
- ModelFlavoursMeta.__contains__ = Mock(return_value=True)
-
- with patch.dict(Bundler.model_py_templates, {"flavour": mock_template}):
- bundler._bundle_model_files(
- output_path=output_path,
- model_binary=model_binary,
- model_py=None,
- model_requirements=model_requirements,
- requirements_files_path=None,
- model_flavour=model_flavour,
- additional_files=None,
- )
-
- patch_transfer_and_bundle_model_files.assert_called_once_with(
- output_path=output_path,
- model_binary=model_binary,
- model_py=mock_template,
- model_requirements=model_requirements,
- requirements_files_path=None,
- additional_files=None,
- )
-
-
-def test_save_and_bundle_raises_exception_model_flavour_not_found(bundler):
- ModelFlavoursMeta.__contains__ = Mock(return_value=False)
-
- with pytest.raises(ModelFlavourNotFound):
- bundler._save_and_bundle_model_files(
- model="", output_path="./", model_flavour="invalid_flavour"
- )
-
-
-@patch("bailoclient.model_handlers.model_bundler.Bundler._get_model_template")
-@patch("bailoclient.model_handlers.model_bundler.Bundler._bundle_model")
-@patch(
- "bailoclient.model_handlers.model_bundler.Bundler._transfer_and_bundle_model_files"
-)
-def test_save_and_bundle_gets_model_py_from_template_if_user_does_not_provide(
- patch_transfer_and_bundle_model_files,
- patch_bundle_model,
- patch_get_template,
- bundler,
-):
- ModelFlavoursMeta.__contains__ = Mock(return_value=True)
- patch_bundle_model.return_value = ("model/path", "mlflow/files/path")
-
- bundler._save_and_bundle_model_files(
- output_path="./", model="", model_flavour="flavour"
- )
-
- patch_get_template.assert_called_once_with("flavour")
-
-
-@patch.dict(
- "bailoclient.model_handlers.model_bundler.Bundler.model_py_templates",
- {},
- clear=True,
-)
-def test_get_model_template_raises_exception_if_no_template(bundler):
- with pytest.raises(ModelTemplateNotAvailable):
- bundler._get_model_template("invalid_flavour")
-
-
-@patch.dict(
- "bailoclient.model_handlers.model_bundler.Bundler.model_py_templates",
- {"torch": "path/to/torch.py"},
- clear=True,
-)
-def test_get_model_template_returns_path(bundler):
- output = bundler._get_model_template("torch")
- assert output == "path/to/torch.py"
-
-
-@patch.dict(
- "bailoclient.model_handlers.model_bundler.Bundler.bundler_functions",
- {},
- clear=True,
-)
-def test_bundle_model_raises_exception_if_bundler_function_not_found(bundler):
- with pytest.raises(ModelMethodNotAvailable):
- bundler._bundle_model("model", "output/path", "flavour")
-
-
-def test_bundle_model_calls_expected_bundler_function(bundler):
- def bundler_function(model, output_path, code_paths):
- return ("model/", [])
-
- mock_bundler_function = Mock(side_effect=bundler_function)
-
- with patch.dict(bundler.bundler_functions, {"flavour": mock_bundler_function}):
- model, additional_files = bundler._bundle_model(
- output_path="output/path", model="model", model_flavour="flavour"
- )
-
- mock_bundler_function.assert_called_once_with(
- output_path="output/path", model="model", code_paths=None
- )
-
-
-def test_bundle_model_returns_normalised_paths(bundler):
- def bundler_function(model, output_path, code_paths):
- return ("model/", ["additional_file/"])
-
- mock_bundler_function = Mock(side_effect=bundler_function)
-
- with patch.dict(bundler.bundler_functions, {"flavour": mock_bundler_function}):
- model, additional_files = bundler._bundle_model(
- "model", "output/path", "flavour"
- )
-
- assert model == "model"
- assert "additional_file" in additional_files
-
-
-import zipfile
-
-
-def test_zip_file_creates_zipfile_at_output_directory_with_one_file(bundler, tmpdir):
- with open(f"{tmpdir}/test.txt", "w") as f:
- f.write("")
-
- os.makedirs(os.path.join(tmpdir, "output"))
-
- file_path = os.path.join(tmpdir, "test.txt")
- zip_path = os.path.join(tmpdir, "output", "output.zip")
-
- bundler._Bundler__zip_file(file_path, zip_path)
-
- assert os.path.exists(zip_path)
-
- with zipfile.ZipFile(zip_path, "r") as zip_ref:
- zip_ref.extractall(f"{tmpdir}/output")
-
- expected_output = os.path.join(tmpdir, "output", "test.txt")
- assert os.path.exists(expected_output)
-
-
-@patch("bailoclient.model_handlers.model_bundler.Bundler._copy_model_py")
-@patch(
- "bailoclient.model_handlers.model_bundler.Bundler._copy_or_generate_requirements"
-)
-@patch("bailoclient.model_handlers.model_bundler.Bundler._copy_optional_files")
-@patch("bailoclient.model_handlers.model_bundler.Bundler._copy_additional_files")
-@patch("bailoclient.model_handlers.model_bundler.Bundler._copy_base_model")
-@patch("bailoclient.model_handlers.model_bundler.Bundler.zip_files")
-@patch("bailoclient.model_handlers.model_bundler.tempfile.TemporaryDirectory")
-def test_transfer_and_bundle_model_files_zips_code_and_binary_folders(
- patch_tempfile,
- patch_zip_files,
- patch_copy_base_model,
- patch_copy_additional_files,
- patch_copy_optional_files,
- patch__copy_or_generate_requirements,
- patch_copy_model_py,
- bundler,
- temp_dir,
-):
- patch_tempfile.return_value.__enter__.return_value = temp_dir
-
- model_binary = "model/binary/path/model.py"
- output_path = "path/to/output/to"
-
- bundler._transfer_and_bundle_model_files(
- model_binary=model_binary,
- output_path=output_path,
- additional_files=[],
- model_requirements="requirements.txt",
- requirements_files_path="",
- optional_files=[],
- model_py="model.py",
- )
-
- patch_zip_files.assert_has_calls(
- [
- call(model_binary, os.path.join(output_path, "binary.zip")),
- call(
- os.path.join(temp_dir, "model", "code"),
- os.path.join(output_path, "code.zip"),
- ),
- ]
- )
-
-
-def test_copy_model_py_copies_file(bundler, tmpdir):
- model_code = os.path.join(tmpdir, "model.py")
- code_path = os.path.join(tmpdir, "model", "code")
- os.makedirs(code_path)
-
- with open(model_code, "w") as f:
- f.write("")
-
- assert os.path.exists(model_code)
-
- bundler._copy_model_py(model_code, code_path)
-
- assert os.path.exists(os.path.join(code_path, "model.py"))
-
-
-def test_copy_base_model_copies_basemodel_directory(bundler, tmpdir):
- output_path = os.path.join(tmpdir, "basemodel")
-
- # directory does not yet exist
- assert not os.path.exists(output_path)
-
- bundler._copy_base_model(output_path)
-
- # directory created
- assert os.path.exists(output_path)
-
- # expected files copied
- assert os.path.exists(os.path.join(output_path, "__init__.py"))
- assert os.path.exists(os.path.join(output_path, "basemodel.py"))
-
-
-@patch("bailoclient.model_handlers.model_bundler.Bundler.generate_requirements_file")
-def test_copy_or_generate_requirements_generates_requirements_from_model_py_if_requirements_not_provided(
- patch_generate_requirements_file, bundler
-):
- model_requirements = None
- requirements_files_path = None
- model_code = "code.py"
- code_path = "output/path"
-
- bundler._copy_or_generate_requirements(
- model_requirements, requirements_files_path, model_code, code_path
- )
-
- patch_generate_requirements_file.assert_called_once_with(
- model_code, os.path.join(code_path, "requirements.txt")
- )
-
-
-@patch("bailoclient.model_handlers.model_bundler.Bundler.generate_requirements_file")
-def test_copy_or_generate_requirements_generates_requirements_from_requirements_files_if_not_model_requirements(
- patch_generate_requirements_file, bundler
-):
- model_requirements = None
- requirements_files_path = "requirements/module.py"
- model_code = "code.py"
- code_path = "output/path"
-
- bundler._copy_or_generate_requirements(
- model_requirements, requirements_files_path, model_code, code_path
- )
-
- patch_generate_requirements_file.assert_called_once_with(
- requirements_files_path, os.path.join(code_path, "requirements.txt")
- )
-
-
-@patch("bailoclient.model_handlers.model_bundler.copyfile")
-def test_copy_or_generate_requirements_copies_provided_requirements_if_given(
- patch_copyfile, bundler
-):
- model_requirements = "requirements/module.py"
- requirements_files_path = None
- model_code = "code.py"
- code_path = "output/path"
-
- bundler._copy_or_generate_requirements(
- model_requirements, requirements_files_path, model_code, code_path
- )
-
-
-@patch("bailoclient.model_handlers.model_bundler.copyfile")
-def test_copy_optional_files_copies_files_from_temp_to_specified_location(
- patch_copyfile, bundler
-):
- output_path = "output/location"
- optional_files = [
- "/tmp/tmpfolder/optional/filepath/file_1.txt",
- "/tmp/tmpfolder/optional/filepath/file_2.txt",
- "/tmp/tmpfolder/different/filepath/file_3.txt",
- ]
-
- bundler._copy_optional_files(optional_files, output_path=output_path)
-
- patch_copyfile.assert_has_calls(
- [
- call(
- "/tmp/tmpfolder/optional/filepath/file_1.txt",
- os.path.join(output_path, "optional", "filepath", "file_1.txt"),
- ),
- call(
- "/tmp/tmpfolder/optional/filepath/file_2.txt",
- os.path.join(output_path, "optional", "filepath", "file_2.txt"),
- ),
- call(
- "/tmp/tmpfolder/different/filepath/file_3.txt",
- os.path.join(output_path, "different", "filepath", "file_3.txt"),
- ),
- ]
- )
-
-
-@patch(
- "bailoclient.model_handlers.model_bundler.Bundler._Bundler__copy_additional_files_from_tempdir"
-)
-def test_copy_additional_files_copies_from_tmpdir_if_files_created_in_tmp(
- patch_copy_additional_files_from_tempdir, bundler
-):
- additional_files = ["tmp/tmpxyz/files/file.txt"]
- model_binary = "tmp/tmpxyz/model.pth"
- code_path = "tmp/tmpabc/code"
-
- bundler._copy_additional_files(
- additional_files, model_binary, "tmp/tmpxyz", code_path
- )
-
- patch_copy_additional_files_from_tempdir.assert_called_once_with(
- additional_files, os.path.join(code_path, "additional_files")
- )
-
-
-@patch(
- "bailoclient.model_handlers.model_bundler.Bundler._Bundler__copy_additional_files_from_local"
-)
-def test_copy_additional_files_copies_from_local_if_no_commonpath_with_tmpdir(
- patch_copy_additional_files_from_local, bundler
-):
- additional_files = ["local/path/files/file.txt"]
- model_binary = "local/path/model.pth"
- code_path = "tmp/tmpabc/code"
-
- bundler._copy_additional_files(
- additional_files, model_binary, "tmp/tmpxyz", code_path
- )
-
- patch_copy_additional_files_from_local.assert_called_once_with(
- additional_files, code_path, os.path.dirname(model_binary)
- )
-
-
-@patch("bailoclient.model_handlers.model_bundler.Path.mkdir")
-@patch("bailoclient.model_handlers.model_bundler.copyfile")
-def test_copy_additional_files_from_temp_dir(patch_copyfile, patch_mkdir, bundler):
- additional_files = ["tmp/tmpxyz/files/file1.txt", "tmp/tmpxyz/files/file2.txt"]
- output_path = "tmp/tmpxyz/additional_files"
-
- bundler._Bundler__copy_additional_files_from_tempdir(additional_files, output_path)
-
- patch_copyfile.assert_has_calls(
- [
- call("tmp/tmpxyz/files/file1.txt", os.path.join(output_path, "file1.txt")),
- call("tmp/tmpxyz/files/file2.txt", os.path.join(output_path, "file2.txt")),
- ]
- )
-
-
-@patch("bailoclient.model_handlers.model_bundler.os.makedirs")
-@patch("bailoclient.model_handlers.model_bundler.copyfile")
-def test_copy_additional_files_from_local(patch_copyfile, patch_mkdir, bundler):
- additional_files = ["local/path/files/file1.txt", "local/path/files/file2.txt"]
- output_path = "local/path/model/code/additional_files"
- model_parent_path = "local/path"
-
- bundler._Bundler__copy_additional_files_from_local(
- additional_files, output_path, model_parent_path
- )
-
- patch_copyfile.assert_has_calls(
- [
- call(
- "local/path/files/file1.txt",
- os.path.join(output_path, "files", "file1.txt"),
- ),
- call(
- "local/path/files/file2.txt",
- os.path.join(output_path, "files", "file2.txt"),
- ),
- ]
- )
-
-
-@patch("bailoclient.model_handlers.model_bundler.ZipFile")
-def test_zip_file_writes_file_to_zip_at_specified_zip_path(patch_zip_file, bundler):
- patch_zip_file.return_value.__enter__.return_value.write = Mock()
-
- file_path = "path/to/file.txt"
- zip_path = "path/for/zip"
-
- bundler._Bundler__zip_file(file_path, zip_path)
-
- patch_zip_file.return_value.__enter__.return_value.write.assert_called_once_with(
- file_path, arcname="file.txt"
- )
-
-
-@patch("bailoclient.model_handlers.model_bundler.Bundler._Bundler__get_output_dir")
-@patch("bailoclient.model_handlers.model_bundler.ZipFile")
-@patch("bailoclient.model_handlers.model_bundler.os.walk")
-def test_zip_directory_creates_zipfile_at_the_output_directory(
- patch_walk, patch_zip_file, patch_get_output_dir, bundler, tmpdir
-):
- code_dir = os.path.join(tmpdir, "code_dir")
- code_data_dir = os.path.join(tmpdir, "code_data_dir")
- output_path = os.path.join(tmpdir, "code.zip")
-
- def get_output_dir(file_path, dir_path):
- if dir_path == code_dir:
- return "code_dir/"
- elif dir_path == code_data_dir:
- return "code_data_dir/"
-
- patch_get_output_dir.side_effect = get_output_dir
- patch_zip_file.return_value.__enter__.return_value.write = Mock()
- patch_walk.return_value = [
- (code_dir, "unused", ["code.txt"]),
- (code_data_dir, "unused", ["data.txt"]),
- ]
-
- bundler._Bundler__zip_directory(tmpdir, output_path)
-
- patch_zip_file.return_value.__enter__.return_value.write.assert_has_calls(
- [
- call(
- filename=os.path.join(code_dir, "code.txt"),
- arcname=os.path.join("code_dir", "code.txt"),
- ),
- call(
- filename=os.path.join(code_data_dir, "data.txt"),
- arcname=os.path.join("code_data_dir", "data.txt"),
- ),
- ],
- )
-
-
-def test_get_output_dir_gets_path_relative_to_the_input_directory(bundler):
- dir_path = "path/to/code"
- sub_dir_path = "path/to/code/data"
-
- path = bundler._Bundler__get_output_dir(dir_path, sub_dir_path)
-
- assert path == "data/"
-
-
-def test_get_output_dir_returns_empty_string_if_paths_are_the_same(bundler):
- file_path = "code"
- dir_path = "code"
-
- path = bundler._Bundler__get_output_dir(file_path, dir_path)
-
- assert path == ""
-
-
-def test_generate_requirements_file_creates_requirements_file_at_filepath(
- bundler, tmpdir
-):
- python_file = os.path.join(
- os.path.dirname(__file__), "../../resources/requirements/file.py"
- )
- output_path = os.path.join(tmpdir, "requirements.txt")
-
- bundler.generate_requirements_file(python_file, output_path)
-
- assert os.path.exists(output_path)
-
- with open(output_path, "r") as f:
- content = f.read()
-
- assert "pydantic" in content
- assert "Requests" in content
-
-
-@patch(
- "bailoclient.model_handlers.model_bundler.subprocess.run",
- side_effect=subprocess.SubprocessError,
-)
-def test_generate_requirements_file_raises_error_if_subprocess_unexpectedly_fails(
- patch_run, bundler, tmpdir
-):
- with pytest.raises(subprocess.SubprocessError):
- bundler.generate_requirements_file("module/path", tmpdir)
diff --git a/lib/python/tests/unit/model_handlers/test_model_loader.py b/lib/python/tests/unit/model_handlers/test_model_loader.py
deleted file mode 100644
index bfa1c25f2..000000000
--- a/lib/python/tests/unit/model_handlers/test_model_loader.py
+++ /dev/null
@@ -1,54 +0,0 @@
-from unittest.mock import patch, Mock
-import pytest
-
-from bailoclient.model_handlers.model_loader import Loader
-from bailoclient.enums import ModelFlavoursMeta
-from bailoclient.exceptions import (
- ModelMethodNotAvailable,
- ModelFlavourNotFound,
-)
-
-
-def loader_function(model_path):
- return model_path
-
-
-def test_load_model_loads_model():
- ModelFlavoursMeta.__contains__ = Mock(return_value=True)
-
- with patch.dict(Loader.model_loaders, {"flavour": loader_function}, clear=True):
- model_loader = Loader()
-
- model_path = "path/to/model"
-
- loaded_model = model_loader.load_model(
- model_path=model_path, model_flavour="flavour"
- )
-
- assert loaded_model == model_path
-
-
-def test_load_model_raises_error_if_the_loader_function_has_not_been_implemented():
- ModelFlavoursMeta.__contains__ = Mock(return_value=True)
-
- with patch.dict(Loader.model_loaders, {}, clear=True):
- model_loader = Loader()
-
- model_path = "path/to/model"
-
- with pytest.raises(ModelMethodNotAvailable):
- loaded_model = model_loader.load_model(
- model_path=model_path, model_flavour="flavour"
- )
-
-
-def test_load_model_raises_error_if_the_flavour_does_not_exit():
- ModelFlavoursMeta.__contains__ = Mock(return_value=False)
-
- model_loader = Loader()
- model_path = "path/to/model"
-
- with pytest.raises(ModelFlavourNotFound):
- loaded_model = model_loader.load_model(
- model_path=model_path, model_flavour="flavour"
- )
diff --git a/lib/python/tests/unit/model_handlers/test_registry.py b/lib/python/tests/unit/model_handlers/test_registry.py
deleted file mode 100644
index df981d411..000000000
--- a/lib/python/tests/unit/model_handlers/test_registry.py
+++ /dev/null
@@ -1,39 +0,0 @@
-from unittest.mock import patch
-from enum import Enum
-
-from bailoclient.model_handlers import Loader, Bundler
-from bailoclient.model_handlers.registry import bundler, loader, template
-
-
-class mock_enum(Enum):
- TEST = "test_value"
-
-
-def test_bundler_decorator_adds_function_to_model_bundlers():
- with patch.dict(Bundler.bundler_functions, {}, clear=True):
-
- @bundler(flavour=mock_enum.TEST)
- def test_bundler():
- pass
-
- assert Bundler.bundler_functions == {"test_value": test_bundler}
-
-
-def test_loader_decorator_adds_function_to_model_loaders():
- with patch.dict(Loader.model_loaders, {}, clear=True):
-
- @loader(flavour=mock_enum.TEST)
- def test_loader():
- pass
-
- assert Loader.model_loaders == {"test_value": test_loader}
-
-
-def test_template_decorator_adds_filepath_to_bundler_templates():
- with patch.dict(Bundler.model_py_templates, {}, clear=True):
-
- @template(flavour=mock_enum.TEST)
- def test_template():
- return "test_path"
-
- assert Bundler.model_py_templates == {"test_value": "test_path"}
diff --git a/lib/python/tests/unit/models/__init__.py b/lib/python/tests/unit/models/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/lib/python/tests/unit/models/test_base.py b/lib/python/tests/unit/models/test_base.py
deleted file mode 100644
index b5d6e1307..000000000
--- a/lib/python/tests/unit/models/test_base.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import pytest
-from bailoclient.models.base import BailoBase
-
-
-@pytest.fixture
-def bailo_base():
- return BailoBase({"_id": "id", "property": "value"})
-
-
-def test_on_init_dictionary_gets_added_as_properties_and_values_to_bailo_base(
- bailo_base,
-):
- assert bailo_base._id == "id"
- assert bailo_base.property == "value"
-
-
-def test_on_init_properties_are_added_by_kwargs():
- bailo_base = BailoBase(_id="id", property="value")
-
- assert bailo_base._id == "id"
- assert bailo_base.property == "value"
-
-
-def test_get_self_without_id_removes_id(bailo_base):
- self_no_id = bailo_base._get_self_without_id()
-
- assert self_no_id == {"property": "value"}
-
-
-def test_str_representation_does_not_include_id(bailo_base):
- assert str(bailo_base) == '{"property": "value"}'
-
-
-def test_dir_bailo_base_does_not_include_id_and_includes_display_and_list_fields(
- bailo_base,
-):
- assert dir(bailo_base) == ["display", "list_fields", "property"]
-
-
-def test_display_returns_str_representation_if_not_printing_to_screen(bailo_base):
- pretty_json = bailo_base.display(to_screen=False)
-
- assert isinstance(pretty_json, str)
-
-
-def test_display_returns_none_if_printing_to_screen(bailo_base):
- assert not bailo_base.display(to_screen=True)
diff --git a/lib/python/tests/unit/models/test_model.py b/lib/python/tests/unit/models/test_model.py
deleted file mode 100644
index 81c5a3bc1..000000000
--- a/lib/python/tests/unit/models/test_model.py
+++ /dev/null
@@ -1,58 +0,0 @@
-from unittest.mock import patch
-
-import pytest
-from bailoclient.models.model import Model, ValidationResult
-from bailoclient.exceptions import ModelSchemaMissing
-
-
-def test_model_cannot_be_created_without_schema():
- with pytest.raises(ModelSchemaMissing):
- Model({"property": "vaue"})
-
-
-@pytest.fixture
-def model():
- return Model(
- {"property": "property_value", "property_2": "property_2_value"},
- _schema={"schema": "schema_value"},
- )
-
-
-def test_model_args_and_kwargs_are_added_as_properties(model):
- assert model.property == "property_value"
- assert model.property_2 == "property_2_value"
- assert model._schema == {"schema": "schema_value"}
-
-
-def test_dir_adds_validate_to_list_of_properties(model):
- assert "property" in dir(model)
-
-
-def test_validate_does_a_thing(model):
- assert isinstance(model.validate(), ValidationResult)
-
-
-class MockError:
- def __init__(self, path, message):
- self.path = path
- self.message = message
-
-
-@patch("bailoclient.models.model.jsonschema.Draft7Validator.iter_errors")
-def test_validate_adds_errors_if_formatting_errors(patch_iter_errors, model):
- returned_errors = [
- MockError(path="error/path", message="error1"),
- MockError(path="error2/path", message="error2"),
- ]
-
- patch_iter_errors.return_value = returned_errors
-
- validation_result = model.validate()
-
- assert isinstance(validation_result, ValidationResult)
- assert not validation_result.is_valid
-
- error_messages = [error.description for error in validation_result.errors]
- expected_error_messages = [error.message for error in returned_errors]
-
- assert error_messages == expected_error_messages
diff --git a/lib/python/tests/unit/test_bailo.py b/lib/python/tests/unit/test_bailo.py
deleted file mode 100644
index dfe268a71..000000000
--- a/lib/python/tests/unit/test_bailo.py
+++ /dev/null
@@ -1,88 +0,0 @@
-from unittest.mock import patch
-
-import pytest
-
-from bailoclient.bailo import Bailo
-
-
-@patch("bailoclient.config.BailoConfig.load")
-def test_bailo_loads_config_file(patch_load, null_bailo_config):
- patch_load.return_value = null_bailo_config
- Bailo("./config.yaml")
- patch_load.assert_called_once_with("./config.yaml")
-
-
-def test_bailo_fails_creation_if_incorrect_config_type():
- with pytest.raises(ValueError):
- Bailo(None)
-
-
-def test_minimal_model_metadata(null_bailo):
- assert null_bailo._minimal_metadata is not None
- assert isinstance(null_bailo._minimal_metadata, dict)
-
-
-def test_minimal_deployment_metadata(null_bailo):
- assert null_bailo._minimal_deployment_metadata is not None
- assert isinstance(null_bailo._minimal_deployment_metadata, dict)
-
-
-@patch("bailoclient.bailo.os.path.abspath")
-@patch("bailoclient.model_handlers.model_bundler.Bundler.bundle_model")
-def test_bundle_model_formats_inputs(patch_bundle_model, patch_abspath, null_bailo):
- def mock_abs_path(path):
- if path.endswith("/"):
- return path[0:-1]
- return path
-
- patch_abspath.side_effect = mock_abs_path
-
- output_path = "output_path/"
- model_binary = "model_binary/"
- model_py = "model_py/"
- model_requirements = "model_requirements"
- model_flavour = "pytorch"
- additional_files = ["additional_files/file_1.py", "additional_files/file_2.py"]
-
- null_bailo.bundle_model(
- output_path=output_path,
- model_binary=model_binary,
- model_py=model_py,
- model_requirements=model_requirements,
- model_flavour=model_flavour,
- additional_files=additional_files,
- )
-
- patch_bundle_model.assert_called_once_with(
- output_path="output_path",
- model=None,
- model_binary="model_binary",
- model_py="model_py",
- model_requirements=model_requirements,
- requirements_files_path=None,
- model_flavour=model_flavour,
- additional_files=additional_files,
- )
-
- assert patch_abspath.call_count == 6
-
-
-@patch("bailoclient.bailo.os.makedirs")
-@patch("bailoclient.bailo.os.path.exists", return_value=False)
-@patch("bailoclient.model_handlers.model_bundler.Bundler.generate_requirements_file")
-def test_generate_requirements_file_formats_inputs_and_creates_output_directory_if_it_does_not_exist(
- patch_generate_requirements_file, patch_exists, patch_makedirs, null_bailo
-):
- module_path = "/path/"
- output_path = "./output/somewhere"
-
- null_bailo.generate_requirements_file(module_path, output_path)
-
- expected_output_dir = "output/somewhere"
- expected_module_path = "/path"
- expected_output_path = "output/somewhere/requirements.txt"
-
- patch_makedirs.assert_called_once_with(expected_output_dir, exist_ok=True)
- patch_generate_requirements_file.assert_called_once_with(
- expected_module_path, expected_output_path
- )
diff --git a/lib/python/tests/unit/test_config.py b/lib/python/tests/unit/test_config.py
deleted file mode 100644
index be2c3c502..000000000
--- a/lib/python/tests/unit/test_config.py
+++ /dev/null
@@ -1,161 +0,0 @@
-import pytest
-from unittest.mock import patch
-
-from bailoclient.config import *
-
-
-COGNITO_ENVIRON = {
- "COGNITO_USERNAME": "username",
- "COGNITO_PASSWORD": "password",
- "COGNITO_USERPOOL": "userpool",
- "COGNITO_CLIENT_ID": "client-id",
- "COGNITO_CLIENT_SECRET": "client-secret",
- "COGNITO_REGION": "region",
- "BAILO_URL": "http://bailo",
-}
-PKI_ENVIRON = {
- "PKI_CERT_PATH": "cert.pem",
- "PKI_CERT_PASSWORD": "password",
- "BAILO_URL": "http://bailo",
-}
-NULL_ENVIRON = {"BAILO_URL": "http://bailo", "BAILO_CA_CERT": "False"}
-
-
-@patch.dict(os.environ, COGNITO_ENVIRON, clear=True)
-def test_create_cognito_config_from_env():
- config = CognitoConfig.from_env()
- assert config.username == "username"
- assert config.password == "password"
- assert config.user_pool_id == "userpool"
- assert config.client_id == "client-id"
- assert config.client_secret == "client-secret"
- assert config.region == "region"
-
-
-@patch.dict(os.environ, {}, clear=True)
-def test_create_cognito_config_from_env_fails_missing_env():
- with pytest.raises(KeyError):
- CognitoConfig.from_env()
-
-
-@patch.dict(os.environ, COGNITO_ENVIRON, clear=True)
-def test_create_bailo_config_from_env_cognito():
- config = BailoConfig.from_env(AuthType.COGNITO)
- assert config.auth == CognitoConfig.from_env()
- assert config.bailo_url == "http://bailo"
-
-
-@patch.dict(os.environ, {}, clear=True)
-def test_create_bailo_config_from_env_fails_missing_env_cognito():
- with pytest.raises(KeyError):
- BailoConfig.from_env(AuthType.COGNITO)
-
-
-@patch.dict(os.environ, COGNITO_ENVIRON, clear=True)
-def test_create_bailo_config_from_env_fails_wrong_auth_type_cognito():
- with pytest.raises(KeyError):
- BailoConfig.from_env(AuthType.PKI)
-
-
-@patch.dict(os.environ, PKI_ENVIRON, clear=True)
-def test_create_pki_config_from_env():
- config = Pkcs12Config.from_env()
- assert config.pkcs12_filename == "cert.pem"
- assert config.pkcs12_password == "password"
-
-
-@patch.dict(os.environ, {}, clear=True)
-def test_create_pki_config_from_env_fails_missing_env():
- with pytest.raises(KeyError):
- Pkcs12Config.from_env()
-
-
-@patch.dict(os.environ, PKI_ENVIRON, clear=True)
-def test_create_bailo_config_with_pki_from_env():
- config = BailoConfig.from_env(AuthType.PKI)
- assert config.auth == Pkcs12Config.from_env()
- assert config.bailo_url == "http://bailo"
-
-
-@patch.dict(os.environ, PKI_ENVIRON, clear=True)
-def test_create_bailo_config_from_env_fails_wrong_env_pki():
- with pytest.raises(KeyError):
- BailoConfig.from_env(AuthType.COGNITO)
-
-
-@patch.dict(os.environ, NULL_ENVIRON, clear=True)
-def test_create_bailo_config_from_env_null():
- config = BailoConfig.from_env(AuthType.NULL)
- assert config.auth is None
- assert config.bailo_url == "http://bailo"
- assert config.ca_verify is False
-
-
-@patch.dict(os.environ, {}, clear=True)
-def test_create_bailo_config_from_env_fails_missing_env_null():
- with pytest.raises(KeyError):
- BailoConfig.from_env(AuthType.NULL)
-
-
-def test_bailo_config_load_save_cognito(temp_dir):
- """Check we can save a configuration file and then reload it"""
-
- config = BailoConfig(
- auth=CognitoConfig(
- username="test-username",
- password="test-password",
- user_pool_id="a4985vnqw094tn4itbjmpq0e598uytn[qv30957u",
- client_id="pv95n76q5q3b698qn354096b8uQ£%B^$%^",
- client_secret="4qb4359068nrjtyvmne5pouybe5YNQ$£uye6bvu 6",
- region="eu-west-2",
- ),
- bailo_url="https://www.example.com/api",
- ca_verify=False,
- )
-
- config_path = os.path.join(temp_dir, "conf.yaml")
-
- config.save(config_path)
- loaded_config = BailoConfig.load(config_path)
-
- assert isinstance(loaded_config, BailoConfig)
- assert isinstance(loaded_config.auth, CognitoConfig)
- assert config == loaded_config
-
-
-def test_bailo_config_load_save_pki(temp_dir):
- """Check we can save a configuration file and then reload it"""
-
- config = BailoConfig(
- auth=Pkcs12Config(pkcs12_filename="cert.pem", pkcs12_password="password"),
- bailo_url="https://www.example.com/api",
- ca_verify=False,
- )
-
- config_path = os.path.join(temp_dir, "conf.yaml")
-
- config.save(config_path)
- loaded_config = BailoConfig.load(config_path)
-
- assert isinstance(loaded_config, BailoConfig)
- assert isinstance(loaded_config.auth, Pkcs12Config)
- assert config == loaded_config
-
-
-def test_bailo_config_load_save_null(temp_dir):
- """Check we can save a configuration file and then reload it"""
-
- config = BailoConfig(
- auth=None,
- bailo_url="https://www.example.com/api",
- ca_verify=False,
- )
-
- config_path = os.path.join(temp_dir, "conf.yaml")
-
- config.save(config_path)
- loaded_config = BailoConfig.load(config_path)
-
- assert isinstance(loaded_config, BailoConfig)
- assert loaded_config.auth is None
- assert config == loaded_config