From f8ee2df875c94964fc73604bbe21683fb8ae4d12 Mon Sep 17 00:00:00 2001 From: James Andrews Date: Fri, 10 May 2019 09:54:16 +0930 Subject: [PATCH] Initial commit --- .gitignore | 1 + AUTHORS.md | 2 + README.md | 1 + docs/Makefile | 177 +++++++++++++ docs/conf.py | 56 ++++ docs/index.rst | 34 +++ docs/integration/api/authentication.md | 39 +++ docs/integration/api/classification_bulk.md | 34 +++ .../integration/api/classification_request.md | 175 +++++++++++++ .../api/classification_response.md | 64 +++++ docs/integration/api/philosophy.md | 9 + docs/integration/basics/getting_started.md | 60 +++++ docs/integration/basics/lifecycle.md | 35 +++ docs/integration/basics/overview.md | 18 ++ docs/integration/basics/sharing.md | 35 +++ docs/integration/basics/variant_matching.md | 87 +++++++ docs/integration/evidence_keys/overview.md | 15 ++ docs/integration/evidence_keys/types.md | 52 ++++ docs/make.bat | 242 ++++++++++++++++++ docs/site/classification_diffs.md | 3 + docs/site/classification_discordance.md | 3 + docs/site/classification_form.md | 3 + docs/site/classification_listing.md | 3 + docs/site/users.md | 3 + 24 files changed, 1151 insertions(+) create mode 100644 .gitignore create mode 100644 AUTHORS.md create mode 100644 README.md create mode 100755 docs/Makefile create mode 100755 docs/conf.py create mode 100755 docs/index.rst create mode 100644 docs/integration/api/authentication.md create mode 100644 docs/integration/api/classification_bulk.md create mode 100644 docs/integration/api/classification_request.md create mode 100644 docs/integration/api/classification_response.md create mode 100644 docs/integration/api/philosophy.md create mode 100644 docs/integration/basics/getting_started.md create mode 100644 docs/integration/basics/lifecycle.md create mode 100644 docs/integration/basics/overview.md create mode 100644 docs/integration/basics/sharing.md create mode 100644 docs/integration/basics/variant_matching.md create mode 100644 docs/integration/evidence_keys/overview.md create mode 100644 docs/integration/evidence_keys/types.md create mode 100755 docs/make.bat create mode 100644 docs/site/classification_diffs.md create mode 100644 docs/site/classification_discordance.md create mode 100644 docs/site/classification_form.md create mode 100644 docs/site/classification_listing.md create mode 100644 docs/site/users.md diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..53752db --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +output diff --git a/AUTHORS.md b/AUTHORS.md new file mode 100644 index 0000000..a154c56 --- /dev/null +++ b/AUTHORS.md @@ -0,0 +1,2 @@ +* David Lawrence +* James Andrews \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..203f5f5 --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +Please see our readthedocs page here... (to be linked) \ No newline at end of file diff --git a/docs/Makefile b/docs/Makefile new file mode 100755 index 0000000..c04268f --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,177 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. 
Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . + +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/ReadtheDocsTemplate.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/ReadtheDocsTemplate.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." 
+ @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/ReadtheDocsTemplate" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/ReadtheDocsTemplate" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/docs/conf.py b/docs/conf.py new file mode 100755 index 0000000..811455e --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,56 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. 
If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- Project information ----------------------------------------------------- + +project = 'Shariant' +copyright = '2019, AGHA' +author = 'Australian Genomics Health Alliance' + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'recommonmark', + 'sphinx_markdown_tables' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'alabaster' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +master_doc = 'index' diff --git a/docs/index.rst b/docs/index.rst new file mode 100755 index 0000000..2912e6b --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,34 @@ +Welcome to Shariant Integration documentation +================================================== + +.. toctree:: + :maxdepth: 1 + :caption: Contents: + + integration/basics/overview + integration/basics/getting_started + integration/basics/lifecycle + integration/basics/sharing + integration/basics/variant_matching + + integration/evidence_keys/overview + integration/evidence_keys/types + + integration/api/philosophy + integration/api/authentication + integration/api/classification_request + integration/api/classification_response + integration/api/classification_bulk + + site/users + site/classification_listing + site/classification_form + site/classification_discordance + site/classification_diffs + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` \ No newline at end of file diff --git a/docs/integration/api/authentication.md b/docs/integration/api/authentication.md new file mode 100644 index 0000000..df45e9a --- /dev/null +++ b/docs/integration/api/authentication.md @@ -0,0 +1,39 @@ +# API Authentication + +The Shariant API will require authentication to perform any operation. The authentication is currently provided through OAuth2 via Keycloak. 
+
+The OAuth URL is
+`https://shariant.org.au/auth/realms/agha/protocol/openid-connect/token`
+and the client_id to provide is
+`shariant-client-tool`
+
+Here's some sample Python code that will log in:
+
+```python
+import requests
+from requests_oauthlib.oauth2_auth import OAuth2
+from requests_oauthlib.oauth2_session import OAuth2Session
+from oauthlib.oauth2 import LegacyApplicationClient
+
+def ping():
+    client_id = 'shariant-client-tools'
+    username = 'xxxx'  # not an actual login, please substitute the username and password we've provided you
+    password = 'yyyy'
+
+    oauth = OAuth2Session(client=LegacyApplicationClient(
+        client_id=client_id
+    ))
+    token = oauth.fetch_token(
+        token_url='https://shariant.org.au/auth/realms/agha/protocol/openid-connect/token',
+        username=username,
+        password=password,
+        client_id=client_id
+    )
+    auth = OAuth2(client_id=client_id, token=token)
+
+    return requests.get('https://shariant.org.au/variantclassification/api/evidence_keys', auth=auth)
+```
+
+Note that any classifications you make through the API will be assigned to the username used here by default.
+Records "owner" propert
\ No newline at end of file
diff --git a/docs/integration/api/classification_bulk.md b/docs/integration/api/classification_bulk.md
new file mode 100644
index 0000000..910a111
--- /dev/null
+++ b/docs/integration/api/classification_bulk.md
@@ -0,0 +1,34 @@
+# API Variant Classification BULK GET, POST
+
+## Bulk POST
+To perform multiple operations in one call, post a "records" array of operations instead of a single record, e.g.
+```json
+{
+    "records": [
+        {"id": 77, "patch": {"ps1": "NM"}},
+        {"id": 78, "delete": true}
+    ]
+}
+```
+
+## Bulk GET
+To perform a bulk export, make a GET request to `https://shariant.org.au/variantclassification/api/classifications/record/`
+
+URL parameters include:
+
+|parameter|value|description|
+|---------|-----|-----------|
+|level|editable|Will return records the current user has write access to, as the latest version of those records including unshared changes.|
+|level|public|Will only return records that have been published to be shared with 3rd party labs|
+|level|logged_in_users|Will only return records that have been published to be shared within Shariant|
+|output|json|(Default) Output will be a JSON object with a key "records" containing all the relevant records.|
+|output|csv|Output will be a CSV file.|
+|output|redcap|Output will be a CSV file structured for REDCap. Note that only records with a value in redcap_record_id will be included.|
+|group_by|_any_|If group_by has a value set then a zip file will be produced, with each group being a JSON array or CSV file depending on the output, e.g. `?group_by=variant`. If omitted then the output will be a single CSV file or JSON array.|
+|group_by|all|Places all the variant classifications in a single zip entry|
+|group_by|none|Places each variant classification in a separate zip entry, with entries named by lab identifier / lab record id|
+|group_by|variant|Creates one zip entry per variant, named by Shariant's internal ID|
+|limit|_number_|Limits the number of rows returned. Warning: when grouping, this limits the number of records randomly across the different groups, so it should be avoided in that case|
+|after|_unix timestamp_|If after has a value, only records that were last shared after that time will be returned. Be careful that this refers to the time the record was published, NOT the date of the last modification|
\ No newline at end of file
diff --git a/docs/integration/api/classification_request.md b/docs/integration/api/classification_request.md
new file mode 100644
index 0000000..d6cb3b3
--- /dev/null
+++ b/docs/integration/api/classification_request.md
@@ -0,0 +1,175 @@
+# API Variant Classification GET/POST
+All operations against a variant classification submission from your lab can be performed against a single end point.
+
+That end point is
+https://shariant.org.au/variantclassification/api/classifications/record/
+
+## Example Post
+
+```json
+{
+    "id": "xyz_pathology/north_street_lab/F03432",
+    "upsert": {
+        "literature": "Found a book PMID: 342244"
+    },
+    "publish": "institution",
+    "editable": true,
+    "return_data": "changes"
+}
+```
+or
+```json
+{
+    "id": "xyz_pathology/north_street_lab/F03432",
+    "delete": true
+}
+```
+
+## Example GET
+
+`https://shariant.org.au/variantclassification/api/classifications/record/xyz_pathology/north_street_lab/F03432?data=flat&config=true`
+
+## All Parameters
+
+|POST|GET|Effect|
+|----|---|------|
+|"id":_xxx_|in URL|See ID Part|
+|"create":{} _or_ "patch":{} _or_ "overwrite":{} _or_ "upsert":{} _or_ "data":{}|N/A|See Evidence Operation|
+|"publish":"_level_"|N/A|See Sharing|
+|"delete":true|N/A|See Deleting|
+|"test":true|N/A|The effects of your POST operation will not be saved on the server. This is handy for validation.|
+|"config":true|?config=true|Will return your lab's evidence key configuration, which is merged with the default configuration for the web form. For the sake of syncing data, ignore this option.|
+|"editable": true|N/A|Will not mark uploaded values as immutable for web form users, i.e. they'll be able to overwrite what was uploaded.|
+|"return_data": "true"|(default)|Will return the complete list of evidence stored against this record|
+|"return_data": "flat"|?data=flat|As above but not nested, so a note would be returned as `"c_hgvs.note":true`|
+|"return_data": "changes"|N/A|Returns only the evidence that changed as a result of your POST|
+
+## ID Part
+To uniquely identify the record, its id can be part of the URL, or provided as part of the JSON submission.
+
+While we will assign a Shariant ID to all records, upon creation of a record you can assign it your own internal ID that only needs to be unique within your lab. You can then continue to use your internal ID for all future references.
+
+The ID part of a submission consists of the following parts:
+
+|part|description|
+|----|-----------|
+|lab_id|identifier assigned to your lab, composed of the lab's parent body and lab id, e.g. `xyz_pathology/north_street_lab`.|
+|lab_record_id|this is your internally unique identifier and can be any string. If no value is provided for the creation of a new record, a UUID will be automatically assigned.|
+|rid|this is Shariant's id for the record. It can only be referenced for existing records and cannot be dictated for new records. If provided then lab_id and lab_record_id should be omitted.|
+|version|This will be a UNIX timestamp of when the version was created. If version is omitted then you will be dealing with the editable version of the record on a POST (if you have access), or the latest version your account has access to otherwise.|
+
+e.g.
+ +```json +{"id":"xyz_pathology/north_street_lab/F03432.1557189685.485863"} +``` +or +```json +{"id":{ + "lab_id":"xyz_pathology/north_street_lab", + "lab_record_id":"F03432", + "version":1557189685.485863 +}} +``` +If referring to this information in the URL instead of JSON, it will be in the form of +
+`https://shariant.org.au/variantclassification/api/classifications/record/[lab]/[lab_record_id](.version)` where the (.version) is optional +e.g. +`https://shariant.org.au/variantclassification/api/classifications/record/xyz_pathology/north_street_lab/F03432.1557189685.485863` + +OR + +`https://shariant.org.au/variantclassification/api/classifications/record/[rid](.version)` where the (.version) is optional +`https://shariant.org.au/variantclassification/api/classifications/record/1453.1557189685.485863` +
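+
+As a rough illustration of the URL form above, here is a minimal sketch of fetching a record by lab id and lab_record_id (the id is the example one used on this page, and the `auth` object is the OAuth2 login described under API Authentication):
+
+```python
+import requests
+
+RECORD_URL = 'https://shariant.org.au/variantclassification/api/classifications/record/'
+
+auth = None  # replace with the OAuth2 auth object built as shown on the API Authentication page
+
+# GET by lab id + lab_record_id, asking for the flat (non-nested) evidence layout
+r = requests.get(RECORD_URL + 'xyz_pathology/north_street_lab/F03432',
+                 params={'data': 'flat'},
+                 auth=auth)
+r.raise_for_status()
+record = r.json()
+print(record['id'], record['version'])
+```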
+
+## Versions
+Any change to the evidence of a record will automatically create a new version of that record.
+(The exception being that if multiple changes are made to a record by the same user via the same method within one minute, those changes will be merged into one version.)
+
+A version is the UNIX timestamp (with decimal places) of when that version of the record was created.
+If a value of version is provided in the ID part, then you will be referring to a read only version of the variant classification and won't be able to perform any operations against it.
+Versions cannot be un-published after being published, but more up to date versions can be published to become the new default.
+
+## Evidence Operation
+
+The evidence part can be provided under one of the following elements. A submission can provide at most one of these elements at a time.
+### create
+Provide evidence for a new record. Will error if the id part matches an existing record.
+
+### patch
+Evidence included in this will be merged with the existing evidence of a record. Will error if the id part doesn't match an existing record.
+
+### overwrite
+Like patch, except all existing evidence will be overwritten.
+
+### upsert or data
+Will act as either create or patch depending on whether the id part matches an existing record.
+
+It is suggested that you only submit with upsert, as it stops labs from having to keep track of whether a record has already been submitted.
+
+## Evidence Format
+The content of create/patch/overwrite/upsert uses the keys described in the evidence keys section, with each key mapping to either a plain value or a dictionary containing a value and/or a note.
+Notes allow you to associate arbitrary text with an evidence key, which is especially useful for boolean or (multi)select fields.
+
+The evidence itself should be in the form of `key: value`
+or `key: { "value": value, "note": note }`
+e.g.
+```json
+{
+    "id": "...",
+    "upsert": {
+        "literature": "Found a book PMID: 342244",
+        "affected_status": {
+            "value": "yes",
+            "note": "Need to double check this"
+        },
+        "condition": {
+            "value": null
+        },
+        "search_terms": null
+    }
+}
+```
+
+How the data is provided is important when mixing use of the API and the web form.
+By default, any key with anything on the right hand side other than a straight `null` will be immutable on the web form.
+This is because your own curation system is deemed to be the source of truth about a variant classification. We want to avoid a scenario where users fix data on Shariant while your curation system stays out of date, followed by another sync operation where the curation system's out-of-date data then upserts over the top of the correct data in Shariant.
+The selective immutability allows web form users to provide values for keys that your curation system can't provide, if necessary.
+Importantly, web users' ability to change the text of a "note" is not affected by the immutability status.
+
+Formats
+
+* `key: null` This completely blanks out any value associated with the key. Value, note and immutability will all be reset.
+* `key: value` This will set the value for a key as well as wiping any note. Web form immutability will be set unless otherwise configured.
+* `key: { "value": "", "note": "" }` If only value or note is provided, this will merge with existing data, e.g. only providing value will leave any existing note untouched. Immutability will be set.
+
+The preferred method is `key: {"value": x}` (with note only if your curation system can send notes).
+For records that don't have values for certain keys that you would normally sync, provide `key: {"value": null}` instead of omitting the entry altogether. This is so immutability is set appropriately.
+
+## Sharing
+You can provide a publish flag in a POST. If create/patch/overwrite/upsert is provided, the publish will relate to the record as it is after applying that change.
+Note that you cannot un-publish a version, just publish more up to date versions.
+
+To publish, include a value for publish in your JSON body,
+e.g.
+```json
+{
+    "id": "67",
+    "publish": "institution"
+}
+```
+### institution
+Visible to any user that belongs to a lab that belongs to the same institution as the lab the record was created against.
+
+### logged_in_users
+Visible to all logged in Shariant users and will be included in Shariant exports to other labs around Australia.
+
+### global
+Allowed to be shared with Clinvar or other 3rd party systems.
+
+Each share level is inclusive of all previous share levels. If on Monday you published a record as logged_in_users, then on Tuesday you published the same record as institution - general Shariant users will have read-only access to the record as it was on Monday, but users within your institution will have access to the more up to date version as it was on Tuesday.
+
+## Deleting
+
+Records that haven't been shared with logged_in_users or global can be deleted by including
+`"delete": true`
\ No newline at end of file
diff --git a/docs/integration/api/classification_response.md b/docs/integration/api/classification_response.md
new file mode 100644
index 0000000..133b020
--- /dev/null
+++ b/docs/integration/api/classification_response.md
@@ -0,0 +1,64 @@
+# API Variant Classification Response
+
+An example of the result of a GET or POST is shown below
+```json
+{
+    "id": 31,
+    "lab_record_id": "77",
+    "institution_name": "SA Pathology",
+    "lab_id": "sa_pathology/unit_1",
+    "lab_name": "SA Pathology Unit 1",
+    "title": "SA Pathology Unit 1 31",
+    "user": "varadmin",
+    "version": 1544864360.852882,
+    "can_write": true,
+    "can_write_latest": true,
+    "flag_collection": 13,
+    "has_changes": true,
+    "last_edited": 1553238219.242146,
+    "variant_id": 483370,
+    "variant": "11:103048382 T>A",
+    "data": {
+        "age": {
+            "value": "76"
+        },
+        "g_hgvs": {
+            "value": "NC_012980.1:m.1898A>G"
+        },
+        "sample": {
+            "value": "blood"
+        },
+        "zygosity": {
+            "value": "heteroplasmic",
+            "note": "xyz"
+        }
+    },
+    "messages": [
+        {
+            "key": "clinical_significance",
+            "code": "mandatory",
+            "message": "Missing mandatory value",
+            "severity": "error"
+        }
+    ]
+}
+```
+
+|key|meaning|
+|---|-------|
+|can_write|Can the current user perform modification operations on this record. Note that if the record was requested with a version, can_write will always be false.|
+|can_write_latest|If you are viewing a classification at a specific version, whether the user would be able to edit the working copy of this classification.|
+|data|Present by default on a GET but not on a POST. The complete set of evidence saved against this record. The format/contents can be changed by using return_data in a POST or data in a GET.|
+|flag_collection|Id for the as yet unpublished flag API (used for discordance resolution and notifications)|
+|id|This is the Shariant ID, referred to as `rid` in other contexts.|
+|institution_name|English friendly name of the institution/organisation which the lab belongs to.|
+|lab_id|How the lab should be referred to via the API. Will be in the form of institution_id/lab_part_id|
+|lab_name|English friendly name of the lab which the record belongs to.|
+|lab_record_id|The id the lab has associated with this record.|
+|last_edited|The date (in unix time) when the working copy of the record was last edited|
+|messages|Any validation messages associated with the data of the record.|
+|title|English friendly title of the record. Does not refer to the variant or the classification.|
+|user|The Shariant user that owns this record.|
+|variant|Chromosome, Position, Ref and Alt|
+|variant_id|Shariant's internal database id for the variant (it won't be meaningful outside of Shariant)|
+|version|The version we're looking at. It will be the unix time when the record was last edited before it was submitted, NOT the time it was submitted, e.g. if you edited the record on Jan 1st and submitted it on Feb 2nd, the version will be the unix time for Jan 1st|
\ No newline at end of file
diff --git a/docs/integration/api/philosophy.md b/docs/integration/api/philosophy.md
new file mode 100644
index 0000000..14cab0f
--- /dev/null
+++ b/docs/integration/api/philosophy.md
@@ -0,0 +1,9 @@
+# API Philosophy
+
+There are some fundamental principles that we're currently applying to the API:
+
+* To be able to do the majority of modifications through the one end-point with different parameters. This was decided to help support batch submissions in future.
+* As long as the API submission is well formatted JSON, nearly everything uploaded will create a new (or alter an existing) variant classification record. The record itself may be marked with errors, but they can be fixed with subsequent API calls or interaction with the web UI.
+* There will be a level of normalisation applied to submitted data: for boolean fields, case insensitive "true", "T" and 1 will be converted to `true` - and "false", "F" and 0 to `false` - and drop down field values will have their case corrected. The level of normalisation will be limited so as to reduce unexpected behaviour.
+* We'll mainly be accepting free form text, so citing of publications or other resources will be achieved by parsing through the text looking for patterns such as "PMID: XXXX", rather than requiring special structure in the submission.
+* Under no circumstances should patient identifiable data be uploaded.
\ No newline at end of file
diff --git a/docs/integration/basics/getting_started.md b/docs/integration/basics/getting_started.md
new file mode 100644
index 0000000..9b5de90
--- /dev/null
+++ b/docs/integration/basics/getting_started.md
@@ -0,0 +1,60 @@
+# Integration Getting Started
+
+Steps
+
+## Step 1 : Defining the sync tool
+
+Shariant accepts the upload of classifications via REST, authenticated via OAuth2.
+Shariant will not initiate any communication to your system (e.g. you won't have to open any ports or provide us authentication); instead it waits for uploads and requests for downloads.
+
+You will need to have a tool running with access to your curation data on your network/cloud. This will be responsible for providing classifications to Shariant and, if possible, automatically providing Shariant classifications as annotations to your system.
+
+If your curation system is one that we've seen before, we can get you started with integration code.
+If it's new, we do have a Python library that will provide some structure around the calls.
+If your IT team already has a series of integrations, they're welcome to use any existing framework to perform the syncing.
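+
+There is no single required shape for this tool, but as a rough sketch (reusing the OAuth2 login from the API Authentication page and the record endpoint described in the API section), one pass of a minimal sync tool might look like the snippet below. `fetch_completed_classifications` and `map_to_evidence_keys` are hypothetical placeholders for your own curation system access and field mapping (Steps 2 and 3), and the lab id shown is made up.
+
+```python
+import requests
+from oauthlib.oauth2 import LegacyApplicationClient
+from requests_oauthlib import OAuth2, OAuth2Session
+
+TOKEN_URL = 'https://shariant.org.au/auth/realms/agha/protocol/openid-connect/token'
+RECORD_URL = 'https://shariant.org.au/variantclassification/api/classifications/record/'
+CLIENT_ID = 'shariant-client-tool'
+
+def shariant_auth(username, password):
+    # password-grant login as described on the API Authentication page
+    session = OAuth2Session(client=LegacyApplicationClient(client_id=CLIENT_ID))
+    token = session.fetch_token(token_url=TOKEN_URL, username=username,
+                                password=password, client_id=CLIENT_ID)
+    return OAuth2(client_id=CLIENT_ID, token=token)
+
+def sync_pass(auth):
+    # fetch_completed_classifications / map_to_evidence_keys are stand-ins for
+    # your own curation system access (Step 2) and field mapping (Step 3)
+    for record in fetch_completed_classifications():
+        body = {
+            "id": "my_org/my_lab/" + record["local_id"],  # made-up lab id
+            "upsert": map_to_evidence_keys(record),
+            "publish": "logged_in_users",
+        }
+        response = requests.post(RECORD_URL, json=body, auth=auth)
+        response.raise_for_status()
+```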
+
+## Step 2 : Accessing and identifying data for the sync tool
+
+Your curation system may have an API to access its data, it might have a database on your network that you can query, or you might have to do a regular extract to a file and process that.
+In addition, it is unlikely you'll want to upload all records from your system immediately. Some systems may let you add arbitrary tags to records, or you may already have a column to indicate that a classification is in a complete state.
+
+Some additional questions you may want to ask at this point:
+How frequently will the sync process take place?
+Will you re-upload all relevant records or only the ones that have changed?
+
+
+## Step 3 : Mapping from your structure to Shariant's
+
+Given any individual field you can enter in your system, how does it map to our pre-registered set of evidence keys?
+The tool might just be able to rename the field, it might be more complicated and need to parse through the value to extract segments, or the field might indicate a value for one of our drop downs.
+
+For example, if your system has a Yes/No field called "This gene is known to be associated with X-linked recessive disease." that would map to our field "mode_of_inheritance" with a value of "x_linked_recessive".
+
+### Minimum mandatory set
+
+Shariant enforces a small base level of required fields:
+* Fields to identify the variant, e.g. c.hgvs including gene symbol, ref and alt, along with a transcript and a build (hg19 or hg38)
+* Lab record id: An ID you provide for the record so you can refer to it in future
+* Clinical significance: How you have classified this on ACMG's scale of Benign to Pathogenic
+* Condition: What condition you are curating against
+* Zygosity: Zygosity in the tested individual.
+
+### Avoid personally identifiable information
+
+You will need to ensure you don't send us any information that could be used to identify the patient; specifically, avoid names and addresses.
+You will also need to ensure users don't enter such information into summaries or other fields you are mapping from your curation system.
+
+## Step 4 : Maintenance & user interaction
+
+It's possible that a small number of your records will fail our variant matching process, or miss out on mandatory information.
+In addition, there will be valid records that run into discordance with classifications from other labs.
+
+Individual classifications can be assigned to different users; if there's something on a classification that requires attention, the linked user will be notified by email.
+Some issues can be fixed by updating your curation system and waiting for the next sync, others will require interaction with the Shariant website.
+
+Work out which users will be responsible for which records, though any member of your lab has access to fix any issue with any of your lab's classifications.
+
+## Step 5 : How best can your system integrate data from Shariant
+
+Shariant provides an API for bulk downloading of classifications.
+Currently we only provide classifications in our own JSON format, but support for more export options such as VCF is on the roadmap. It is expected that some systems will need custom solutions here, so some work may be done as the requirements come in.
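+
+As a sketch of what the retrieval side can look like today, the snippet below uses the bulk GET endpoint and parameters described under "API Variant Classification BULK GET, POST" to pull everything shared with logged in Shariant users since a given time. `last_sync_timestamp` is a placeholder for wherever your tool keeps its own state, and `auth` is the OAuth2 object from the API Authentication page.
+
+```python
+import requests
+
+BULK_URL = 'https://shariant.org.au/variantclassification/api/classifications/record/'
+
+def download_shared_since(auth, last_sync_timestamp):
+    # 'after' limits results to records published since the given unix timestamp
+    r = requests.get(BULK_URL,
+                     params={'level': 'logged_in_users',
+                             'output': 'json',
+                             'after': last_sync_timestamp},
+                     auth=auth)
+    r.raise_for_status()
+    return r.json().get('records', [])
+```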
\ No newline at end of file
diff --git a/docs/integration/basics/lifecycle.md b/docs/integration/basics/lifecycle.md
new file mode 100644
index 0000000..fdb6038
--- /dev/null
+++ b/docs/integration/basics/lifecycle.md
@@ -0,0 +1,35 @@
+# Variant Classification Lifecycle
+
+Here's a general view of what will happen to a classification.
+There are more details about each step elsewhere in this documentation.
+
+## Assign an internal id
+
+Before a record is uploaded to Shariant, it should be given an alpha-numeric id that is unique within your lab.
+This "lab_record_id" can then be used in the future to refer to the classification.
+
+## Uploaded to Shariant
+
+Along with your "lab_record_id", the sync tool will upload your evidence data to Shariant. If a record already exists with that id it will update the fields you provide, otherwise a new record will be created.
+
+## Variant matching
+
+Using the fields you've provided, Shariant will start a process of variant matching for that classification. The record will be given a "Matching Variant" flag. Shortly after, that flag will be automatically closed; if a variant couldn't be matched, the record will instead be flagged as "Matching Variant Failed" and will need to be deleted and imported again.
+
+## Submitting/Sharing
+
+As the sync tool uploads the records, it can also request that they be submitted to a specific share level. Share levels (covered elsewhere) determine who can see your record, including whether it is exported to 3rd parties.
+Note that records must be free of errors to be submitted and shared.
+
+## Discordance
+
+Once you've shared a record wider than your organisation, it will be included in discordance calculations. If another classification for the same variant (even from your lab) has come to a different clinical significance, the records will be marked with a "Discordance" flag.
+
+## Updates
+
+The same record can be uploaded an indefinite number of times. Shariant will store a version for each change, presenting the latest version to other users while previously submitted versions remain accessible.
+As your curation system is considered the "source of truth", the cycle that includes re-uploading classifications is vital.
+
+## Clinvar Submission
+
+TODO
\ No newline at end of file
diff --git a/docs/integration/basics/overview.md b/docs/integration/basics/overview.md
new file mode 100644
index 0000000..392f68e
--- /dev/null
+++ b/docs/integration/basics/overview.md
@@ -0,0 +1,18 @@
+# Integration Overview
+
+You have variant classifications in your own curation system and you want them to be in ours.
+
+What are the basic goals here:
+
+* Ensure we can consistently match records to variants.
+* Compare classifications (across different labs) for the same variant in a meaningful way.
+* Format the classification for submission to other databases such as Clinvar, which will have their own structure requirements.
+* (Avoid providing patient identifiable information).
+
+Also, we have variant classifications in our database and you want access to them in your curation system.
+
+What are the basic goals here:
+
+* Ability to retrieve an extract from Shariant and import it into a separate track in your curation system
+and/or
+* Be able to quickly use the web interface to search for relevant variants
\ No newline at end of file
diff --git a/docs/integration/basics/sharing.md b/docs/integration/basics/sharing.md
new file mode 100644
index 0000000..ce05fa2
--- /dev/null
+++ b/docs/integration/basics/sharing.md
@@ -0,0 +1,35 @@
+# Variant Classification Sharing
+
+Shariant has the following concepts for handling who has access to what:
+
+|Body|Meaning|
+|----|-------|
+|Organisation|Sometimes labelled Institution, e.g. SA Pathology, VCGS|
+|Lab|A lab belongs to an organisation, e.g. SA Pathology's Familial Cancer, Frome Road|
+|User|A user has their own Shariant login. Users should match real people one to one, with the exception of a user account specifically set up to sync records between a lab's system and Shariant.|
+|Variant Classification|A variant classification will be owned by a user and a lab|
+
+A user can belong to multiple labs, though typically a user will only belong to one.
+
+Variant classifications can be seen in two modes:
+
+* The live editable copy.
+* A read-only version shared at a given point in time.
+
+If you or someone from your lab created a variant classification, you will be dealing with the editable copy.
+If someone from outside your lab shares a record with you, you will be dealing with the specific version that they chose to share. If they make changes and share it again, you will then have access to the new version. The inverse is true of records you share.
+
+Users with access to the editable version can elect to share the record in its current state as long as there are no outstanding validation errors. This will give other users read only access to the data as it was when the publish action was performed.
+
+Sharing can be done at several levels. Each level encompasses the level before it, and once a record is shared at a certain level it can only be shared at that level or higher in future. The share levels are:
+
+|Share Level|Who Can See|
+|-----------|-----------|
+|Lab|Will be available to the lab|
+|Organisation|Every lab belonging to the institution/organisation that the owning lab belongs to can see this version|
+|All Shariant Users|All Shariant users will be able to see this version|
+|3rd Party Databases|This version is deemed ready to be exported to 3rd party databases such as Clinvar|
+
+See the Sharing section in the API documentation for information on how to utilise these share levels.
+
+The purpose of Shariant is to share records. The lower share levels are intended for records that are awaiting review or more information - not as a permanent half-shared state.
\ No newline at end of file
diff --git a/docs/integration/basics/variant_matching.md b/docs/integration/basics/variant_matching.md
new file mode 100644
index 0000000..52b520d
--- /dev/null
+++ b/docs/integration/basics/variant_matching.md
@@ -0,0 +1,87 @@
+Variant Matching
+==================================================
+
+TODO : The exact Shariant field combinations we would like
+
+## Scope
+Shariant Variant Normalisation Process describes the variant normalisation protocol implemented for the Australian Genomics initiative, Shariant. It should be read in conjunction with Shariant Technical Overview.
+
+## Introduction
+A variant is composed of the reference, the position within the reference, and the base change.
+Examples include HGVS [1] and the chrom/pos/ref/alt fields of a VCF file [2]. The table below shows the same variant represented in different ways:
+
+|Type|Example|
+|----|-------|
+|HGVS coding DNA (c.)|NM_003607.3(CDC42BPA):c.325A>G|
+|HGVS genomic (g.)|NC_000001.10:g.227400866T>C|
+|VCF|1:227400866 T>C|
+
+It is critical to assign variants to a unique identifier, so that it is possible to link variants from different sources, such as external population databases like gnomAD, to track observations from different VCFs, or classifications from different labs.
+
+## Difficulties and solutions
+
+### HGVS protein changes
+
+Some medical scientists work with HGVS protein expressions (e.g. CDC42BPA:p.Asn109Asp). The issue with this notation is that there are often many different transcripts for a given gene, meaning the amino acid change could be at a different locus. There is the concept of "canonical transcripts" (a default transcript used when no transcript is specified) but canonical transcripts differ between labs and are often inconsistent over time.
+
+Even if the canonical transcript is specified, codon degeneracy (redundancy of base changes leading to the same amino acid) means the nucleotide-level change cannot be distinguished. Determining the specific nucleotide change may be necessary (e.g. splicing).
+
+Shariant Solution: HGVS protein changes are collected as Evidence Keys in Shariant, but are not used to link a classification to a variant. A laboratory will be unable to submit a variant with the protein change alone.
+
+### VCF or HGVS g. do not specify transcript
+
+Genomic coordinates (such as VCF or HGVS g. notation) can uniquely resolve the base level changes, but may have different protein changes if there are different transcripts. The choice of transcript is significant for curation (e.g. splicing, pathogenicity prediction tools).
+
+Intergenic variants will not have a transcript.
+
+Shariant Solution: It is strongly recommended that classifications submitted with VCF or HGVS g. Evidence Keys also include the versioned transcript identifier used for curation. Classifications without a transcript will still be linked, but a warning will be provided if the variant is inside a gene.
+
+### Ambiguous build or transcript versions
+
+Not all VCF files specify the genome build. Some labs have stored classifications using VCF coordinates in custom databases but have not specified a build. Additionally, it is not always possible to check the build based on the variant submitted.
+
+Labs that use HGVS coding DNA notation often store the transcript (e.g. NM_003607:c.325A>G) but not the transcript version (NM_003607.3:c.325A>G). While unlikely, it is possible that a variant in a different version of the same transcript may refer to different base changes.
+
+Shariant Solution: Submission of a variant without a genome build being specified will not be allowed in Shariant and the record will be rejected. Additionally, submission as HGVS c. without a transcript version will be strongly discouraged. If a laboratory does not have a transcript version, then the latest version of the transcript will be used to load the variant, and a warning will be provided that this may not be correct.
+
+### Multiple representations within the same build
+
+It is possible to describe a variant involving an insertion and/or deletion in different ways for the same build or transcript version. For instance, if the reference is "GGG" and a single G is deleted, this variant may be described as deleting either the first, second or third G.
+
+Conventions have appeared, such as representing the change in as few bases as possible and left aligning (Tan et al [3], which introduces the tool vt normalise).
+
+Regardless of the convention chosen, it must be applied consistently.
+
+The table below (from ClinGen Allele Registry [4]) shows left and right aligned variants:
+
+|Example Variant|Left Aligned|Right Aligned|
+|---------------|------------|-------------|
+|ACTG____TCGTG ACTGTAAGTCGTG|ACT____GTCGTG AACTGTAAGTCGTG|ACTGTAAGTCGTG ACTGT____CGTG|
+|ACTGTCGTG ACTG___TG|ACTGTCGTG ACT___GTG|ACTGTCGTG ACTGT___G|
+|GTTCACTGCTGCTGCATC GTTCACTG___CTGCATC|GTTCACTGCTGCTGCATC GTTCA___CTGCTGCATC|GTTCACTGCTGCTGCATC GTTCACTGCTGC___ATC|
+
+Popular variant callers (such as GATK) produce VCF files which are not normalised, though it is quite common for pipelines to use vt normalise to left-align VCF files.
+
+HGVS nomenclature specifies right alignment. The table below (copied from ClinGen Allele Registry [4]) shows multiple HGVS expressions for the same change.
+
+|HGVS expressions|Unique CAid|
+|----------------|-----------|
+|NM_000277.1:c.1200‐1delG NM_000277.1:c.1200delG|CA229394|
+|NM_017739.3:c.1895+1_1895+4delGTGA NM_017739.3:c.1895+5_1895+8delGTGA|CA263965|
+|NM_005228.3:c.2284‐6delCinsCTCCAGGA AGCCT NM_005228.3:c.2284‐5_2290dupTCCAGG AAGCCT|CA135833|
+
+Shariant Solution: HGVS coordinates are converted into VCF coordinates, then all VCF (or VCF-from-HGVS) coordinates are run through vt normalise before linking to a variant.
+
+### Cross mapping between different genome builds
+
+It is not always possible to lift-over all variants to another build, with rates of ~97% being commonly reported.
+
+Newer builds have resolved some difficult sequences and introduced additional haplotypes (different reference sequences for regions of the genome that vary between human populations). Therefore, it may be possible that two distinct variants in one build liftover to become a single variant in another build, or vice versa. A naive liftover of two separate classifications from the same build may make those classifications discordant on another build.
+
+Shariant Solution: The ClinGen Allele Registry will be used to solve this problem by providing a globally unique ID (CAid) which can link variants across different builds. When a classification is imported which resolves to a novel variant in Shariant, an API request will be made to the ClinGen Allele Registry to retrieve or create a CAid for this variant.
+
+The variant classification discordance process implemented in Shariant works against these CAids (please refer to Shariant Technical Overview).
+
+CAids can also be used as Evidence Keys to provide unambiguous linking of classifications to variants, and to simplify submission to ClinVar.
+
+## References
+
+[1] den Dunnen, J. T., Dalgleish, R., Maglott, D. R., Hart, R. K., Greenblatt, M. S., McGowan‐Jordan, J., Roux, A., Smith, T., Antonarakis, S. E. and Taschner, P. E. (2016), HGVS Recommendations for the Description of Sequence Variants: 2016 Update. Human Mutation, 37: 564-569. doi:10.1002/humu.22981
+
+[2] The Variant Call Format (VCF) Version 4.2 Specification https://samtools.github.io/hts-specs/VCFv4.2.pdf
+
+[3] Adrian Tan, Gonçalo R. Abecasis, Hyun Min Kang; Unified representation of genetic variants, Bioinformatics, Volume 31, Issue 13, 1 July 2015, Pages 2202–2204, https://doi.org/10.1093/bioinformatics/btv112
+
+[4] Pawliczek P, Patel RY, Ashmore LR, et al. ClinGen Allele Registry links information about genetic variants. Human Mutation. 2018;39:1690–1701. https://doi.org/10.1002/humu.23637
\ No newline at end of file
diff --git a/docs/integration/evidence_keys/overview.md b/docs/integration/evidence_keys/overview.md
new file mode 100644
index 0000000..a62b174
--- /dev/null
+++ b/docs/integration/evidence_keys/overview.md
@@ -0,0 +1,15 @@
+# Evidence Keys Overview
+
+A variant classification in Shariant is mostly composed of evidence key values.
+
+[Evidence keys can be viewed here](https://shariant.org.au/variantclassification/evidence_keys)
+
+or, if you have a Shariant login,
+
+[Evidence keys can be viewed here](https://shariant.org.au/variantclassification/api/evidence_keys)
+
+Some extra notes:
+
+* Evidence keys are flat, i.e. a key will not be nested within another key.
+* Each key is of a certain type, see [Evidence Key Types](types.md).
+* Each evidence key supports a free text "note".
\ No newline at end of file
diff --git a/docs/integration/evidence_keys/types.md b/docs/integration/evidence_keys/types.md
new file mode 100644
index 0000000..c56d847
--- /dev/null
+++ b/docs/integration/evidence_keys/types.md
@@ -0,0 +1,52 @@
+# Evidence Key Types
+
+A key's value type determines how it will behave:
+
+### (F) free-text
+Free text, no validation applies; avoid new line characters if possible.
+
+### (T) free-text multi-line
+Free text, no validation applies; can contain new line characters.
+
+### (S) select
+The key will also contain an array of options. To submit a value, provide a value that matches an option's key. If the entry also has `allows_custom_values` then any value will be accepted.
+
+### (M) multi-select
+As per the normal select, the key will also contain an array of options. To submit a value, provide either a JSON array or a comma separated string. The elements in the array or string should match the keys of options. If the entry also has `allows_custom_values` then any value will be accepted, and standard values and custom values can be intermixed.
+The ordering of values from submission will not be maintained.
+
+When viewing records, the value will be an array of option keys, unless there were no values selected in which case the value will be `null`.
+
+### (B) boolean
+Provide a JSON boolean value, or alternatively "true" or "false" case insensitive.
+Be aware that the web form is unable to distinguish between false and no value.
+
+### (D) date
+Provide a string in the format yyyy-MM-dd, e.g. Jan 2nd 1997 would be "1997-01-02". Time parts are not supported.
+
+### (C) criteria
+An ACMG criterion, e.g. PVS1, PS1, PS2. Accepts values of
+* "NM" for Not Met
+* "BS" for Benign Strong
+* "BP" for Benign Supporting
+* "BA" for Benign Standalone
+* "PP" for Pathogenic Supporting
+* "PM" for Pathogenic Moderate
+* "PS" for Pathogenic Strong
+* "PVS" for Pathogenic Very Strong
+* true for the default strength of the criterion
+
+### (L) float
+_sorry, (F) was taken_ Accepts any number; if valid it will be stored as a float.
+
+### (I) integer
+Accepts any whole number; if valid it will be stored as an int.
+
+### (N) unit
+Accepts a number between 0 and 1; if valid it will be stored as a float.
+
+### (P) percent
+Accepts a number between 0 and 100; if valid it will be stored as a float.
+
+### (U) user
+Provide the email address of a user. Typically this will be for the owner of a record, in which case it's more important for it to be the user who will log in to the system to fix any issues rather than the person who created the classification.
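+
+To make the behaviour of these types concrete, here is an illustrative upsert fragment mixing several of them. The key names other than `affected_status` and the criteria fields are hypothetical examples only - the real keys and their types come from the evidence keys listing.
+
+```json
+{
+    "id": "xyz_pathology/north_street_lab/F03432",
+    "upsert": {
+        "curation_verified_date": "1997-01-02",
+        "ps1": "NM",
+        "pm2": true,
+        "affected_status": {"value": "yes", "note": "per referral"},
+        "literature_search_terms": ["term_a", "term_b"],
+        "allele_frequency": 0.0002
+    }
+}
+```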
\ No newline at end of file diff --git a/docs/make.bat b/docs/make.bat new file mode 100755 index 0000000..fec43bb --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,242 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=_build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . +set I18NSPHINXOPTS=%SPHINXOPTS% . +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. xml to make Docutils-native XML files + echo. pseudoxml to make pseudoxml-XML files for display purposes + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + + +%SPHINXBUILD% 2> nul +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. 
+ goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\complexity.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\complexity.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdf" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf + cd %BUILDDIR%/.. + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdfja" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf-ja + cd %BUILDDIR%/.. + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +if "%1" == "xml" ( + %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The XML files are in %BUILDDIR%/xml. + goto end +) + +if "%1" == "pseudoxml" ( + %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
+ goto end +) + +:end diff --git a/docs/site/classification_diffs.md b/docs/site/classification_diffs.md new file mode 100644 index 0000000..5f3bfe5 --- /dev/null +++ b/docs/site/classification_diffs.md @@ -0,0 +1,3 @@ +# Classification Diffs + +Diffs \ No newline at end of file diff --git a/docs/site/classification_discordance.md b/docs/site/classification_discordance.md new file mode 100644 index 0000000..646b590 --- /dev/null +++ b/docs/site/classification_discordance.md @@ -0,0 +1,3 @@ +# Classification Discordance + +TODO \ No newline at end of file diff --git a/docs/site/classification_form.md b/docs/site/classification_form.md new file mode 100644 index 0000000..f3ac856 --- /dev/null +++ b/docs/site/classification_form.md @@ -0,0 +1,3 @@ +# Classification Form + +Form \ No newline at end of file diff --git a/docs/site/classification_listing.md b/docs/site/classification_listing.md new file mode 100644 index 0000000..b083234 --- /dev/null +++ b/docs/site/classification_listing.md @@ -0,0 +1,3 @@ +# Classification Listing + +TODO \ No newline at end of file diff --git a/docs/site/users.md b/docs/site/users.md new file mode 100644 index 0000000..f6363f3 --- /dev/null +++ b/docs/site/users.md @@ -0,0 +1,3 @@ +# Users + +TODO \ No newline at end of file